// Copyright 2015-2025 The Khronos Group Inc.
//
// SPDX-License-Identifier: Apache-2.0 OR MIT
//

// This header is generated from the Khronos Vulkan XML API Registry.

#ifndef VULKAN_STRUCTS_HPP
#define VULKAN_STRUCTS_HPP

// include-what-you-use: make sure vulkan.hpp is seen as used by code completers
// IWYU pragma: private, include "vulkan/vulkan.hpp"

#if !defined( VULKAN_HPP_CXX_MODULE )
#  include <cstring>  // strcmp
#endif

namespace VULKAN_HPP_NAMESPACE
{

  //===============
  //=== STRUCTS ===
  //===============

  // wrapper struct for struct VkAabbPositionsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAabbPositionsKHR.html
  struct AabbPositionsKHR
  {
    using NativeType = VkAabbPositionsKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; every coordinate defaults to 0.0f.
    VULKAN_HPP_CONSTEXPR
      AabbPositionsKHR( float minX_ = {}, float minY_ = {}, float minZ_ = {}, float maxX_ = {}, float maxY_ = {}, float maxZ_ = {} ) VULKAN_HPP_NOEXCEPT
      : minX{ minX_ }
      , minY{ minY_ }
      , minZ{ minZ_ }
      , maxX{ maxX_ }
      , maxY{ maxY_ }
      , maxZ{ maxZ_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AabbPositionsKHR( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Implicit conversion from the native C struct; relies on this wrapper being
    // layout-compatible with VkAabbPositionsKHR (same members, same order).
    AabbPositionsKHR( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT : AabbPositionsKHR( *reinterpret_cast<AabbPositionsKHR const *>( &rhs ) ) {}

    AabbPositionsKHR & operator=( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct, using the same layout-aliasing cast.
    AabbPositionsKHR & operator=( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AabbPositionsKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinX( float minX_ ) VULKAN_HPP_NOEXCEPT
    {
      minX = minX_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinY( float minY_ ) VULKAN_HPP_NOEXCEPT
    {
      minY = minY_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinZ( float minZ_ ) VULKAN_HPP_NOEXCEPT
    {
      minZ = minZ_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxX( float maxX_ ) VULKAN_HPP_NOEXCEPT
    {
      maxX = maxX_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxY( float maxY_ ) VULKAN_HPP_NOEXCEPT
    {
      maxY = maxY_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxZ( float maxZ_ ) VULKAN_HPP_NOEXCEPT
    {
      maxZ = maxZ_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reference and pointer conversions to the native C type, so the wrapper can
    // be handed directly to the C API.
    operator VkAabbPositionsKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAabbPositionsKHR *>( this );
    }

    operator VkAabbPositionsKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAabbPositionsKHR *>( this );
    }

    operator VkAabbPositionsKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAabbPositionsKHR *>( this );
    }

    operator VkAabbPositionsKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAabbPositionsKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns const references to all members, in declaration order.
    std::tuple<float const &, float const &, float const &, float const &, float const &, float const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( minX, minY, minZ, maxX, maxY, maxZ );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AabbPositionsKHR const & ) const = default;
#else
    // Member-wise float comparison; a NaN in any member makes operator== return
    // false (and operator!= true).
    bool operator==( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( minX == rhs.minX ) && ( minY == rhs.minY ) && ( minZ == rhs.minZ ) && ( maxX == rhs.maxX ) && ( maxY == rhs.maxY ) && ( maxZ == rhs.maxZ );
#  endif
    }

    bool operator!=( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order must mirror VkAabbPositionsKHR for the casts above to be valid.
    float minX = {};
    float minY = {};
    float minZ = {};
    float maxX = {};
    float maxY = {};
    float maxZ = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper via the CppType trait.
  template <>
  struct CppType<VkAabbPositionsKHR>
  {
    using Type = AabbPositionsKHR;
  };
#endif
  // The NV name is an alias of the KHR struct (identical layout and members).
  using AabbPositionsNV = AabbPositionsKHR;

  // wrapper union for union VkDeviceOrHostAddressConstKHR: holds either a device
  // address or a read-only host pointer. There is no discriminator - the user has
  // to track which member was written last.
  union DeviceOrHostAddressConstKHR
  {
    using NativeType = VkDeviceOrHostAddressConstKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )

    // Default-constructs with deviceAddress as the active member.
    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR( DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) {}

    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR( const void * hostAddress_ ) : hostAddress( hostAddress_ ) {}
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
    // Chainable setters; writing a member makes it the active one.
    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR & setDeviceAddress( DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceAddress = deviceAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR & setHostAddress( const void * hostAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      hostAddress = hostAddress_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to the native C union (assumes identical layout).
    operator VkDeviceOrHostAddressConstKHR const &() const
    {
      return *reinterpret_cast<const VkDeviceOrHostAddressConstKHR *>( this );
    }

    operator VkDeviceOrHostAddressConstKHR &()
    {
      return *reinterpret_cast<VkDeviceOrHostAddressConstKHR *>( this );
    }

#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
    DeviceAddress deviceAddress;
    const void *  hostAddress;
#else
    // Without unrestricted-union support, fall back to the plain C member type.
    VkDeviceAddress deviceAddress;
    const void *    hostAddress;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper via the CppType trait.
  template <>
  struct CppType<VkDeviceOrHostAddressConstKHR>
  {
    using Type = DeviceOrHostAddressConstKHR;
  };
#endif

  // wrapper struct for struct VkAccelerationStructureGeometryTrianglesDataKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometryTrianglesDataKHR.html
  struct AccelerationStructureGeometryTrianglesDataKHR
  {
    using NativeType = VkAccelerationStructureGeometryTrianglesDataKHR;

    // Compile-time identity used for structure chains: the sType value of this
    // struct and whether duplicates are allowed in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAccelerationStructureGeometryTrianglesDataKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by its member initializer and is
    // not a parameter, and pNext comes last so it can usually be defaulted.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR( Format                      vertexFormat_  = Format::eUndefined,
                                                                           DeviceOrHostAddressConstKHR vertexData_    = {},
                                                                           DeviceSize                  vertexStride_  = {},
                                                                           uint32_t                    maxVertex_     = {},
                                                                           IndexType                   indexType_     = IndexType::eUint16,
                                                                           DeviceOrHostAddressConstKHR indexData_     = {},
                                                                           DeviceOrHostAddressConstKHR transformData_ = {},
                                                                           const void *                pNext_         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , vertexFormat{ vertexFormat_ }
      , vertexData{ vertexData_ }
      , vertexStride{ vertexStride_ }
      , maxVertex{ maxVertex_ }
      , indexType{ indexType_ }
      , indexData{ indexData_ }
      , transformData{ transformData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14
      AccelerationStructureGeometryTrianglesDataKHR( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Implicit conversion from the native C struct; relies on identical layout.
    AccelerationStructureGeometryTrianglesDataKHR( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureGeometryTrianglesDataKHR( *reinterpret_cast<AccelerationStructureGeometryTrianglesDataKHR const *>( &rhs ) )
    {
    }

    AccelerationStructureGeometryTrianglesDataKHR & operator=( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct, via the same layout-aliasing cast.
    AccelerationStructureGeometryTrianglesDataKHR & operator=( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AccelerationStructureGeometryTrianglesDataKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setVertexFormat( Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexFormat = vertexFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setVertexData( DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexData = vertexData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setVertexStride( DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexStride = vertexStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setMaxVertex( uint32_t maxVertex_ ) VULKAN_HPP_NOEXCEPT
    {
      maxVertex = maxVertex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setIndexType( IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
    {
      indexType = indexType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setIndexData( DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT
    {
      indexData = indexData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR &
      setTransformData( DeviceOrHostAddressConstKHR const & transformData_ ) VULKAN_HPP_NOEXCEPT
    {
      transformData = transformData_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reference and pointer conversions to the native C type.
    operator VkAccelerationStructureGeometryTrianglesDataKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureGeometryTrianglesDataKHR *>( this );
    }

    operator VkAccelerationStructureGeometryTrianglesDataKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureGeometryTrianglesDataKHR *>( this );
    }

    operator VkAccelerationStructureGeometryTrianglesDataKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAccelerationStructureGeometryTrianglesDataKHR *>( this );
    }

    operator VkAccelerationStructureGeometryTrianglesDataKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAccelerationStructureGeometryTrianglesDataKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns const references to all members, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               Format const &,
               DeviceOrHostAddressConstKHR const &,
               DeviceSize const &,
               uint32_t const &,
               IndexType const &,
               DeviceOrHostAddressConstKHR const &,
               DeviceOrHostAddressConstKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, vertexFormat, vertexData, vertexStride, maxVertex, indexType, indexData, transformData );
    }
#endif

  public:
    // Member order must mirror the C struct for the casts above to be valid.
    StructureType               sType         = StructureType::eAccelerationStructureGeometryTrianglesDataKHR;
    const void *                pNext         = {};
    Format                      vertexFormat  = Format::eUndefined;
    DeviceOrHostAddressConstKHR vertexData    = {};
    DeviceSize                  vertexStride  = {};
    uint32_t                    maxVertex     = {};
    IndexType                   indexType     = IndexType::eUint16;
    DeviceOrHostAddressConstKHR indexData     = {};
    DeviceOrHostAddressConstKHR transformData = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper via the CppType trait.
  template <>
  struct CppType<VkAccelerationStructureGeometryTrianglesDataKHR>
  {
    using Type = AccelerationStructureGeometryTrianglesDataKHR;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct carrying it.
  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureGeometryTrianglesDataKHR>
  {
    using Type = AccelerationStructureGeometryTrianglesDataKHR;
  };

  // wrapper struct for struct VkAccelerationStructureGeometryAabbsDataKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometryAabbsDataKHR.html
  struct AccelerationStructureGeometryAabbsDataKHR
  {
    using NativeType = VkAccelerationStructureGeometryAabbsDataKHR;

    // Compile-time identity used for structure chains.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAccelerationStructureGeometryAabbsDataKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed, pNext comes last and defaults to nullptr.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR( DeviceOrHostAddressConstKHR data_   = {},
                                                                       DeviceSize                  stride_ = {},
                                                                       const void *                pNext_  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , data{ data_ }
      , stride{ stride_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Implicit conversion from the native C struct; relies on identical layout.
    AccelerationStructureGeometryAabbsDataKHR( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureGeometryAabbsDataKHR( *reinterpret_cast<AccelerationStructureGeometryAabbsDataKHR const *>( &rhs ) )
    {
    }

    AccelerationStructureGeometryAabbsDataKHR & operator=( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct, via the same layout-aliasing cast.
    AccelerationStructureGeometryAabbsDataKHR & operator=( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AccelerationStructureGeometryAabbsDataKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & setData( DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT
    {
      data = data_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & setStride( DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
    {
      stride = stride_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reference and pointer conversions to the native C type.
    operator VkAccelerationStructureGeometryAabbsDataKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureGeometryAabbsDataKHR *>( this );
    }

    operator VkAccelerationStructureGeometryAabbsDataKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureGeometryAabbsDataKHR *>( this );
    }

    operator VkAccelerationStructureGeometryAabbsDataKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAccelerationStructureGeometryAabbsDataKHR *>( this );
    }

    operator VkAccelerationStructureGeometryAabbsDataKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAccelerationStructureGeometryAabbsDataKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, DeviceOrHostAddressConstKHR const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, data, stride );
    }
#endif

  public:
    // Member order must mirror the C struct for the casts above to be valid.
    StructureType               sType  = StructureType::eAccelerationStructureGeometryAabbsDataKHR;
    const void *                pNext  = {};
    DeviceOrHostAddressConstKHR data   = {};
    DeviceSize                  stride = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper via the CppType trait.
  template <>
  struct CppType<VkAccelerationStructureGeometryAabbsDataKHR>
  {
    using Type = AccelerationStructureGeometryAabbsDataKHR;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct carrying it.
  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureGeometryAabbsDataKHR>
  {
    using Type = AccelerationStructureGeometryAabbsDataKHR;
  };

  // wrapper struct for struct VkAccelerationStructureGeometryInstancesDataKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometryInstancesDataKHR.html
  struct AccelerationStructureGeometryInstancesDataKHR
  {
    using NativeType = VkAccelerationStructureGeometryInstancesDataKHR;

    // Compile-time identity used for structure chains.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAccelerationStructureGeometryInstancesDataKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed, pNext comes last and defaults to nullptr.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR( Bool32                      arrayOfPointers_ = {},
                                                                           DeviceOrHostAddressConstKHR data_            = {},
                                                                           const void *                pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , arrayOfPointers{ arrayOfPointers_ }
      , data{ data_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14
      AccelerationStructureGeometryInstancesDataKHR( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Implicit conversion from the native C struct; relies on identical layout.
    AccelerationStructureGeometryInstancesDataKHR( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureGeometryInstancesDataKHR( *reinterpret_cast<AccelerationStructureGeometryInstancesDataKHR const *>( &rhs ) )
    {
    }

    AccelerationStructureGeometryInstancesDataKHR & operator=( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct, via the same layout-aliasing cast.
    AccelerationStructureGeometryInstancesDataKHR & operator=( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AccelerationStructureGeometryInstancesDataKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & setArrayOfPointers( Bool32 arrayOfPointers_ ) VULKAN_HPP_NOEXCEPT
    {
      arrayOfPointers = arrayOfPointers_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & setData( DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT
    {
      data = data_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reference and pointer conversions to the native C type.
    operator VkAccelerationStructureGeometryInstancesDataKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureGeometryInstancesDataKHR *>( this );
    }

    operator VkAccelerationStructureGeometryInstancesDataKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureGeometryInstancesDataKHR *>( this );
    }

    operator VkAccelerationStructureGeometryInstancesDataKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAccelerationStructureGeometryInstancesDataKHR *>( this );
    }

    operator VkAccelerationStructureGeometryInstancesDataKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAccelerationStructureGeometryInstancesDataKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, Bool32 const &, DeviceOrHostAddressConstKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, arrayOfPointers, data );
    }
#endif

  public:
    // Member order must mirror the C struct for the casts above to be valid.
    StructureType               sType           = StructureType::eAccelerationStructureGeometryInstancesDataKHR;
    const void *                pNext           = {};
    Bool32                      arrayOfPointers = {};
    DeviceOrHostAddressConstKHR data            = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper via the CppType trait.
  template <>
  struct CppType<VkAccelerationStructureGeometryInstancesDataKHR>
  {
    using Type = AccelerationStructureGeometryInstancesDataKHR;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct carrying it.
  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureGeometryInstancesDataKHR>
  {
    using Type = AccelerationStructureGeometryInstancesDataKHR;
  };

  // wrapper union for union VkAccelerationStructureGeometryDataKHR: exactly one of
  // triangles / aabbs / instances is active. There is no discriminator in the
  // union itself; the geometryType member of the enclosing geometry struct tells
  // the API which member to read.
  union AccelerationStructureGeometryDataKHR
  {
    using NativeType = VkAccelerationStructureGeometryDataKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )

    // Default-constructs with triangles as the active member.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( AccelerationStructureGeometryTrianglesDataKHR triangles_ = {} ) : triangles( triangles_ ) {}

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( AccelerationStructureGeometryAabbsDataKHR aabbs_ ) : aabbs( aabbs_ ) {}

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( AccelerationStructureGeometryInstancesDataKHR instances_ ) : instances( instances_ ) {}
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
    // Chainable setters; writing a member makes it the active one.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR &
      setTriangles( AccelerationStructureGeometryTrianglesDataKHR const & triangles_ ) VULKAN_HPP_NOEXCEPT
    {
      triangles = triangles_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & setAabbs( AccelerationStructureGeometryAabbsDataKHR const & aabbs_ ) VULKAN_HPP_NOEXCEPT
    {
      aabbs = aabbs_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR &
      setInstances( AccelerationStructureGeometryInstancesDataKHR const & instances_ ) VULKAN_HPP_NOEXCEPT
    {
      instances = instances_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to the native C union (assumes identical layout).
    operator VkAccelerationStructureGeometryDataKHR const &() const
    {
      return *reinterpret_cast<const VkAccelerationStructureGeometryDataKHR *>( this );
    }

    operator VkAccelerationStructureGeometryDataKHR &()
    {
      return *reinterpret_cast<VkAccelerationStructureGeometryDataKHR *>( this );
    }

#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
    AccelerationStructureGeometryTrianglesDataKHR triangles;
    AccelerationStructureGeometryAabbsDataKHR     aabbs;
    AccelerationStructureGeometryInstancesDataKHR instances;
#else
    // Without unrestricted-union support, fall back to the plain C member types.
    VkAccelerationStructureGeometryTrianglesDataKHR triangles;
    VkAccelerationStructureGeometryAabbsDataKHR     aabbs;
    VkAccelerationStructureGeometryInstancesDataKHR instances;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper via the CppType trait.
  template <>
  struct CppType<VkAccelerationStructureGeometryDataKHR>
  {
    using Type = AccelerationStructureGeometryDataKHR;
  };
#endif

  // wrapper struct for struct VkAccelerationStructureGeometryKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometryKHR.html
  struct AccelerationStructureGeometryKHR
  {
    using NativeType = VkAccelerationStructureGeometryKHR;

    // Compile-time identity used for structure chains.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAccelerationStructureGeometryKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor. geometryType selects which member of the geometry
    // union is meaningful; sType is fixed, pNext comes last and defaults to nullptr.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR( GeometryTypeKHR                      geometryType_ = GeometryTypeKHR::eTriangles,
                                                              AccelerationStructureGeometryDataKHR geometry_     = {},
                                                              GeometryFlagsKHR                     flags_        = {},
                                                              const void *                         pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , geometryType{ geometryType_ }
      , geometry{ geometry_ }
      , flags{ flags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Implicit conversion from the native C struct; relies on identical layout.
    AccelerationStructureGeometryKHR( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureGeometryKHR( *reinterpret_cast<AccelerationStructureGeometryKHR const *>( &rhs ) )
    {
    }

    AccelerationStructureGeometryKHR & operator=( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct, via the same layout-aliasing cast.
    AccelerationStructureGeometryKHR & operator=( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AccelerationStructureGeometryKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setGeometryType( GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryType = geometryType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setGeometry( AccelerationStructureGeometryDataKHR const & geometry_ ) VULKAN_HPP_NOEXCEPT
    {
      geometry = geometry_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setFlags( GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reference and pointer conversions to the native C type.
    operator VkAccelerationStructureGeometryKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureGeometryKHR *>( this );
    }

    operator VkAccelerationStructureGeometryKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureGeometryKHR *>( this );
    }

    operator VkAccelerationStructureGeometryKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAccelerationStructureGeometryKHR *>( this );
    }

    operator VkAccelerationStructureGeometryKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAccelerationStructureGeometryKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, GeometryTypeKHR const &, AccelerationStructureGeometryDataKHR const &, GeometryFlagsKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, geometryType, geometry, flags );
    }
#endif

  public:
    // Member order must mirror the C struct for the casts above to be valid.
    StructureType                        sType        = StructureType::eAccelerationStructureGeometryKHR;
    const void *                         pNext        = {};
    GeometryTypeKHR                      geometryType = GeometryTypeKHR::eTriangles;
    AccelerationStructureGeometryDataKHR geometry     = {};
    GeometryFlagsKHR                     flags        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper via the CppType trait.
  template <>
  struct CppType<VkAccelerationStructureGeometryKHR>
  {
    using Type = AccelerationStructureGeometryKHR;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct carrying it.
  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureGeometryKHR>
  {
    using Type = AccelerationStructureGeometryKHR;
  };

  // wrapper union for union VkDeviceOrHostAddressKHR: holds either a device
  // address or a writable host pointer (mutable counterpart of
  // DeviceOrHostAddressConstKHR). No discriminator - the user has to track
  // which member was written last.
  union DeviceOrHostAddressKHR
  {
    using NativeType = VkDeviceOrHostAddressKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )

    // Default-constructs with deviceAddress as the active member.
    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR( DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) {}

    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR( void * hostAddress_ ) : hostAddress( hostAddress_ ) {}
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
    // Chainable setters; writing a member makes it the active one.
    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR & setDeviceAddress( DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceAddress = deviceAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR & setHostAddress( void * hostAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      hostAddress = hostAddress_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to the native C union (assumes identical layout).
    operator VkDeviceOrHostAddressKHR const &() const
    {
      return *reinterpret_cast<const VkDeviceOrHostAddressKHR *>( this );
    }

    operator VkDeviceOrHostAddressKHR &()
    {
      return *reinterpret_cast<VkDeviceOrHostAddressKHR *>( this );
    }

#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
    DeviceAddress deviceAddress;
    void *        hostAddress;
#else
    // Without unrestricted-union support, fall back to the plain C member type.
    VkDeviceAddress deviceAddress;
    void *          hostAddress;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper via the CppType trait.
  template <>
  struct CppType<VkDeviceOrHostAddressKHR>
  {
    using Type = DeviceOrHostAddressKHR;
  };
#endif

  // wrapper struct for struct VkAccelerationStructureBuildGeometryInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureBuildGeometryInfoKHR.html
  struct AccelerationStructureBuildGeometryInfoKHR
  {
    using NativeType = VkAccelerationStructureBuildGeometryInfoKHR;

    // sType value identifying this struct in a pNext chain; sType must keep this value.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAccelerationStructureBuildGeometryInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Field-wise constructor; all members default to zero/top-level/build.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR( AccelerationStructureTypeKHR       type_  = AccelerationStructureTypeKHR::eTopLevel,
                                                                       BuildAccelerationStructureFlagsKHR flags_ = {},
                                                                       BuildAccelerationStructureModeKHR  mode_  = BuildAccelerationStructureModeKHR::eBuild,
                                                                       AccelerationStructureKHR           srcAccelerationStructure_   = {},
                                                                       AccelerationStructureKHR           dstAccelerationStructure_   = {},
                                                                       uint32_t                           geometryCount_              = {},
                                                                       const AccelerationStructureGeometryKHR *         pGeometries_  = {},
                                                                       const AccelerationStructureGeometryKHR * const * ppGeometries_ = {},
                                                                       DeviceOrHostAddressKHR                           scratchData_  = {},
                                                                       const void *                                     pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , type{ type_ }
      , flags{ flags_ }
      , mode{ mode_ }
      , srcAccelerationStructure{ srcAccelerationStructure_ }
      , dstAccelerationStructure{ dstAccelerationStructure_ }
      , geometryCount{ geometryCount_ }
      , pGeometries{ pGeometries_ }
      , ppGeometries{ ppGeometries_ }
      , scratchData{ scratchData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Copy-construct from the native C struct; wrapper and C struct are layout-compatible.
    AccelerationStructureBuildGeometryInfoKHR( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureBuildGeometryInfoKHR( *reinterpret_cast<AccelerationStructureBuildGeometryInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced constructor: takes array proxies instead of count+pointer pairs.
    // Exactly one of geometries_ / pGeometries_ may be non-empty (they are
    // mutually exclusive views of the same data in the C API).
    AccelerationStructureBuildGeometryInfoKHR( AccelerationStructureTypeKHR                                                    type_,
                                               BuildAccelerationStructureFlagsKHR                                              flags_,
                                               BuildAccelerationStructureModeKHR                                               mode_,
                                               AccelerationStructureKHR                                                        srcAccelerationStructure_,
                                               AccelerationStructureKHR                                                        dstAccelerationStructure_,
                                               ArrayProxyNoTemporaries<const AccelerationStructureGeometryKHR> const &         geometries_,
                                               ArrayProxyNoTemporaries<const AccelerationStructureGeometryKHR * const> const & pGeometries_ = {},
                                               DeviceOrHostAddressKHR                                                          scratchData_ = {},
                                               const void *                                                                    pNext_       = nullptr )
      : pNext( pNext_ )
      , type( type_ )
      , flags( flags_ )
      , mode( mode_ )
      , srcAccelerationStructure( srcAccelerationStructure_ )
      , dstAccelerationStructure( dstAccelerationStructure_ )
      // geometryCount is derived from whichever proxy is non-empty.
      , geometryCount( static_cast<uint32_t>( !geometries_.empty() ? geometries_.size() : pGeometries_.size() ) )
      , pGeometries( geometries_.data() )
      , ppGeometries( pGeometries_.data() )
      , scratchData( scratchData_ )
    {
      // Supplying both proxies is a usage error: assert or throw depending on build mode.
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( ( !geometries_.empty() + !pGeometries_.empty() ) <= 1 );
#    else
      if ( 1 < ( !geometries_.empty() + !pGeometries_.empty() ) )
      {
        throw LogicError(
          VULKAN_HPP_NAMESPACE_STRING
          "::AccelerationStructureBuildGeometryInfoKHR::AccelerationStructureBuildGeometryInfoKHR: 1 < ( !geometries_.empty() + !pGeometries_.empty() )" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    AccelerationStructureBuildGeometryInfoKHR & operator=( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible bitwise copy).
    AccelerationStructureBuildGeometryInfoKHR & operator=( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AccelerationStructureBuildGeometryInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member (sType is fixed).
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setType( AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setFlags( BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setMode( BuildAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
    {
      mode = mode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR &
      setSrcAccelerationStructure( AccelerationStructureKHR srcAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAccelerationStructure = srcAccelerationStructure_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR &
      setDstAccelerationStructure( AccelerationStructureKHR dstAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAccelerationStructure = dstAccelerationStructure_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryCount = geometryCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR &
      setPGeometries( const AccelerationStructureGeometryKHR * pGeometries_ ) VULKAN_HPP_NOEXCEPT
    {
      pGeometries = pGeometries_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Proxy overload: sets both geometryCount and pGeometries from one range.
    AccelerationStructureBuildGeometryInfoKHR &
      setGeometries( ArrayProxyNoTemporaries<const AccelerationStructureGeometryKHR> const & geometries_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryCount = static_cast<uint32_t>( geometries_.size() );
      pGeometries   = geometries_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR &
      setPpGeometries( const AccelerationStructureGeometryKHR * const * ppGeometries_ ) VULKAN_HPP_NOEXCEPT
    {
      ppGeometries = ppGeometries_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Proxy overload for the pointer-to-pointer form: note it assigns ppGeometries,
    // not pGeometries, and also updates geometryCount.
    AccelerationStructureBuildGeometryInfoKHR &
      setPGeometries( ArrayProxyNoTemporaries<const AccelerationStructureGeometryKHR * const> const & pGeometries_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryCount = static_cast<uint32_t>( pGeometries_.size() );
      ppGeometries  = pGeometries_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setScratchData( DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT
    {
      scratchData = scratchData_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the layout-compatible native C struct (by reference and pointer).
    operator VkAccelerationStructureBuildGeometryInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( this );
    }

    operator VkAccelerationStructureBuildGeometryInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureBuildGeometryInfoKHR *>( this );
    }

    operator VkAccelerationStructureBuildGeometryInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( this );
    }

    operator VkAccelerationStructureBuildGeometryInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAccelerationStructureBuildGeometryInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               AccelerationStructureTypeKHR const &,
               BuildAccelerationStructureFlagsKHR const &,
               BuildAccelerationStructureModeKHR const &,
               AccelerationStructureKHR const &,
               AccelerationStructureKHR const &,
               uint32_t const &,
               const AccelerationStructureGeometryKHR * const &,
               const AccelerationStructureGeometryKHR * const * const &,
               DeviceOrHostAddressKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(
        sType, pNext, type, flags, mode, srcAccelerationStructure, dstAccelerationStructure, geometryCount, pGeometries, ppGeometries, scratchData );
    }
#endif

    // NOTE(review): no operator==/<=> here — presumably omitted because scratchData is a
    // union (DeviceOrHostAddressKHR) and cannot be meaningfully compared; verify against
    // the generator's conventions. Member order must match VkAccelerationStructureBuildGeometryInfoKHR.
  public:
    StructureType                                    sType                    = StructureType::eAccelerationStructureBuildGeometryInfoKHR;
    const void *                                     pNext                    = {};
    AccelerationStructureTypeKHR                     type                     = AccelerationStructureTypeKHR::eTopLevel;
    BuildAccelerationStructureFlagsKHR               flags                    = {};
    BuildAccelerationStructureModeKHR                mode                     = BuildAccelerationStructureModeKHR::eBuild;
    AccelerationStructureKHR                         srcAccelerationStructure = {};
    AccelerationStructureKHR                         dstAccelerationStructure = {};
    uint32_t                                         geometryCount            = {};
    const AccelerationStructureGeometryKHR *         pGeometries              = {};
    const AccelerationStructureGeometryKHR * const * ppGeometries             = {};
    DeviceOrHostAddressKHR                           scratchData              = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its vulkan.hpp wrapper for generic (template) code.
  template <>
  struct CppType<VkAccelerationStructureBuildGeometryInfoKHR>
  {
    using Type = AccelerationStructureBuildGeometryInfoKHR;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct (used when walking pNext chains).
  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureBuildGeometryInfoKHR>
  {
    using Type = AccelerationStructureBuildGeometryInfoKHR;
  };

  // wrapper struct for struct VkAccelerationStructureBuildRangeInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureBuildRangeInfoKHR.html
  struct AccelerationStructureBuildRangeInfoKHR
  {
    using NativeType = VkAccelerationStructureBuildRangeInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Field-wise constructor; plain-data struct with no sType/pNext.
    VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR( uint32_t primitiveCount_  = {},
                                                                 uint32_t primitiveOffset_ = {},
                                                                 uint32_t firstVertex_     = {},
                                                                 uint32_t transformOffset_ = {} ) VULKAN_HPP_NOEXCEPT
      : primitiveCount{ primitiveCount_ }
      , primitiveOffset{ primitiveOffset_ }
      , firstVertex{ firstVertex_ }
      , transformOffset{ transformOffset_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR( AccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Copy-construct from the native C struct; wrapper and C struct are layout-compatible.
    AccelerationStructureBuildRangeInfoKHR( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureBuildRangeInfoKHR( *reinterpret_cast<AccelerationStructureBuildRangeInfoKHR const *>( &rhs ) )
    {
    }

    AccelerationStructureBuildRangeInfoKHR & operator=( AccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible bitwise copy).
    AccelerationStructureBuildRangeInfoKHR & operator=( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AccelerationStructureBuildRangeInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per member.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setPrimitiveCount( uint32_t primitiveCount_ ) VULKAN_HPP_NOEXCEPT
    {
      primitiveCount = primitiveCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setPrimitiveOffset( uint32_t primitiveOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      primitiveOffset = primitiveOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
    {
      firstVertex = firstVertex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setTransformOffset( uint32_t transformOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      transformOffset = transformOffset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the layout-compatible native C struct (by reference and pointer).
    operator VkAccelerationStructureBuildRangeInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR *>( this );
    }

    operator VkAccelerationStructureBuildRangeInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureBuildRangeInfoKHR *>( this );
    }

    operator VkAccelerationStructureBuildRangeInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR *>( this );
    }

    operator VkAccelerationStructureBuildRangeInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAccelerationStructureBuildRangeInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references, in declaration order.
    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( primitiveCount, primitiveOffset, firstVertex, transformOffset );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AccelerationStructureBuildRangeInfoKHR const & ) const = default;
#else
    // Member-wise equality for pre-C++20 builds (or via reflect() when enabled).
    bool operator==( AccelerationStructureBuildRangeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( primitiveCount == rhs.primitiveCount ) && ( primitiveOffset == rhs.primitiveOffset ) && ( firstVertex == rhs.firstVertex ) &&
             ( transformOffset == rhs.transformOffset );
#  endif
    }

    bool operator!=( AccelerationStructureBuildRangeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    // Member order must match VkAccelerationStructureBuildRangeInfoKHR exactly.
  public:
    uint32_t primitiveCount  = {};
    uint32_t primitiveOffset = {};
    uint32_t firstVertex     = {};
    uint32_t transformOffset = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its vulkan.hpp wrapper for generic (template) code.
  template <>
  struct CppType<VkAccelerationStructureBuildRangeInfoKHR>
  {
    using Type = AccelerationStructureBuildRangeInfoKHR;
  };
#endif

  // wrapper struct for struct VkAccelerationStructureBuildSizesInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureBuildSizesInfoKHR.html
  struct AccelerationStructureBuildSizesInfoKHR
  {
    using NativeType = VkAccelerationStructureBuildSizesInfoKHR;

    // sType value identifying this struct in a pNext chain; sType must keep this value.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAccelerationStructureBuildSizesInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Field-wise constructor.
    VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR( DeviceSize accelerationStructureSize_ = {},
                                                                 DeviceSize updateScratchSize_         = {},
                                                                 DeviceSize buildScratchSize_          = {},
                                                                 void *     pNext_                     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , accelerationStructureSize{ accelerationStructureSize_ }
      , updateScratchSize{ updateScratchSize_ }
      , buildScratchSize{ buildScratchSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR( AccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Copy-construct from the native C struct; wrapper and C struct are layout-compatible.
    AccelerationStructureBuildSizesInfoKHR( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureBuildSizesInfoKHR( *reinterpret_cast<AccelerationStructureBuildSizesInfoKHR const *>( &rhs ) )
    {
    }

    AccelerationStructureBuildSizesInfoKHR & operator=( AccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible bitwise copy).
    AccelerationStructureBuildSizesInfoKHR & operator=( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AccelerationStructureBuildSizesInfoKHR const *>( &rhs );
      return *this;
    }

    // NOTE(review): no setters are generated for this struct, unlike its siblings —
    // presumably because it is filled in by the implementation (an output struct);
    // confirm against the generator's conventions.

    // Implicit conversions to the layout-compatible native C struct (by reference and pointer).
    operator VkAccelerationStructureBuildSizesInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureBuildSizesInfoKHR *>( this );
    }

    operator VkAccelerationStructureBuildSizesInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( this );
    }

    operator VkAccelerationStructureBuildSizesInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAccelerationStructureBuildSizesInfoKHR *>( this );
    }

    operator VkAccelerationStructureBuildSizesInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &, void * const &, DeviceSize const &, DeviceSize const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, accelerationStructureSize, updateScratchSize, buildScratchSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AccelerationStructureBuildSizesInfoKHR const & ) const = default;
#else
    // Member-wise equality for pre-C++20 builds (or via reflect() when enabled).
    bool operator==( AccelerationStructureBuildSizesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructureSize == rhs.accelerationStructureSize ) &&
             ( updateScratchSize == rhs.updateScratchSize ) && ( buildScratchSize == rhs.buildScratchSize );
#  endif
    }

    bool operator!=( AccelerationStructureBuildSizesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    // Member order must match VkAccelerationStructureBuildSizesInfoKHR exactly.
  public:
    StructureType sType                     = StructureType::eAccelerationStructureBuildSizesInfoKHR;
    void *        pNext                     = {};
    DeviceSize    accelerationStructureSize = {};
    DeviceSize    updateScratchSize         = {};
    DeviceSize    buildScratchSize          = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its vulkan.hpp wrapper for generic (template) code.
  template <>
  struct CppType<VkAccelerationStructureBuildSizesInfoKHR>
  {
    using Type = AccelerationStructureBuildSizesInfoKHR;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct (used when walking pNext chains).
  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureBuildSizesInfoKHR>
  {
    using Type = AccelerationStructureBuildSizesInfoKHR;
  };

  // wrapper struct for struct VkAccelerationStructureCaptureDescriptorDataInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureCaptureDescriptorDataInfoEXT.html
  struct AccelerationStructureCaptureDescriptorDataInfoEXT
  {
    using NativeType = VkAccelerationStructureCaptureDescriptorDataInfoEXT;

    // sType value identifying this struct in a pNext chain; sType must keep this value.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAccelerationStructureCaptureDescriptorDataInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Field-wise constructor; carries either a KHR or an NV acceleration structure handle.
    VULKAN_HPP_CONSTEXPR AccelerationStructureCaptureDescriptorDataInfoEXT( AccelerationStructureKHR accelerationStructure_   = {},
                                                                            AccelerationStructureNV  accelerationStructureNV_ = {},
                                                                            const void *             pNext_                   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , accelerationStructure{ accelerationStructure_ }
      , accelerationStructureNV{ accelerationStructureNV_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      AccelerationStructureCaptureDescriptorDataInfoEXT( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Copy-construct from the native C struct; wrapper and C struct are layout-compatible.
    AccelerationStructureCaptureDescriptorDataInfoEXT( VkAccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureCaptureDescriptorDataInfoEXT( *reinterpret_cast<AccelerationStructureCaptureDescriptorDataInfoEXT const *>( &rhs ) )
    {
    }

    AccelerationStructureCaptureDescriptorDataInfoEXT &
      operator=( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible bitwise copy).
    AccelerationStructureCaptureDescriptorDataInfoEXT & operator=( VkAccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AccelerationStructureCaptureDescriptorDataInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member (sType is fixed).
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCaptureDescriptorDataInfoEXT &
      setAccelerationStructure( AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructure = accelerationStructure_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCaptureDescriptorDataInfoEXT &
      setAccelerationStructureNV( AccelerationStructureNV accelerationStructureNV_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureNV = accelerationStructureNV_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the layout-compatible native C struct (by reference and pointer).
    operator VkAccelerationStructureCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( this );
    }

    operator VkAccelerationStructureCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( this );
    }

    operator VkAccelerationStructureCaptureDescriptorDataInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( this );
    }

    operator VkAccelerationStructureCaptureDescriptorDataInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &, const void * const &, AccelerationStructureKHR const &, AccelerationStructureNV const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, accelerationStructure, accelerationStructureNV );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AccelerationStructureCaptureDescriptorDataInfoEXT const & ) const = default;
#else
    // Member-wise equality for pre-C++20 builds (or via reflect() when enabled).
    bool operator==( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure ) &&
             ( accelerationStructureNV == rhs.accelerationStructureNV );
#  endif
    }

    bool operator!=( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    // Member order must match VkAccelerationStructureCaptureDescriptorDataInfoEXT exactly.
  public:
    StructureType            sType                   = StructureType::eAccelerationStructureCaptureDescriptorDataInfoEXT;
    const void *             pNext                   = {};
    AccelerationStructureKHR accelerationStructure   = {};
    AccelerationStructureNV  accelerationStructureNV = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its vulkan.hpp wrapper for generic (template) code.
  template <>
  struct CppType<VkAccelerationStructureCaptureDescriptorDataInfoEXT>
  {
    using Type = AccelerationStructureCaptureDescriptorDataInfoEXT;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct (used when walking pNext chains).
  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureCaptureDescriptorDataInfoEXT>
  {
    using Type = AccelerationStructureCaptureDescriptorDataInfoEXT;
  };

  // wrapper struct for struct VkAccelerationStructureCreateInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureCreateInfoKHR.html
  struct AccelerationStructureCreateInfoKHR
  {
    using NativeType = VkAccelerationStructureCreateInfoKHR;

    // sType value identifying this struct in a pNext chain; sType must keep this value.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAccelerationStructureCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Field-wise constructor.
    VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( AccelerationStructureCreateFlagsKHR createFlags_   = {},
                                                             Buffer                              buffer_        = {},
                                                             DeviceSize                          offset_        = {},
                                                             DeviceSize                          size_          = {},
                                                             AccelerationStructureTypeKHR        type_          = AccelerationStructureTypeKHR::eTopLevel,
                                                             DeviceAddress                       deviceAddress_ = {},
                                                             const void *                        pNext_         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , createFlags{ createFlags_ }
      , buffer{ buffer_ }
      , offset{ offset_ }
      , size{ size_ }
      , type{ type_ }
      , deviceAddress{ deviceAddress_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Copy-construct from the native C struct; wrapper and C struct are layout-compatible.
    AccelerationStructureCreateInfoKHR( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureCreateInfoKHR( *reinterpret_cast<AccelerationStructureCreateInfoKHR const *>( &rhs ) )
    {
    }

    AccelerationStructureCreateInfoKHR & operator=( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible bitwise copy).
    AccelerationStructureCreateInfoKHR & operator=( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AccelerationStructureCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member (sType is fixed).
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setCreateFlags( AccelerationStructureCreateFlagsKHR createFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      createFlags = createFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setBuffer( Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setOffset( DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setSize( DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setType( AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setDeviceAddress( DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceAddress = deviceAddress_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the layout-compatible native C struct (by reference and pointer).
    operator VkAccelerationStructureCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( this );
    }

    operator VkAccelerationStructureCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureCreateInfoKHR *>( this );
    }

    operator VkAccelerationStructureCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( this );
    }

    operator VkAccelerationStructureCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAccelerationStructureCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               AccelerationStructureCreateFlagsKHR const &,
               Buffer const &,
               DeviceSize const &,
               DeviceSize const &,
               AccelerationStructureTypeKHR const &,
               DeviceAddress const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, createFlags, buffer, offset, size, type, deviceAddress );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AccelerationStructureCreateInfoKHR const & ) const = default;
#else
    // Member-wise equality for pre-C++20 builds (or via reflect() when enabled).
    bool operator==( AccelerationStructureCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createFlags == rhs.createFlags ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) &&
             ( size == rhs.size ) && ( type == rhs.type ) && ( deviceAddress == rhs.deviceAddress );
#  endif
    }

    bool operator!=( AccelerationStructureCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    // Member order must match VkAccelerationStructureCreateInfoKHR exactly.
  public:
    StructureType                       sType         = StructureType::eAccelerationStructureCreateInfoKHR;
    const void *                        pNext         = {};
    AccelerationStructureCreateFlagsKHR createFlags   = {};
    Buffer                              buffer        = {};
    DeviceSize                          offset        = {};
    DeviceSize                          size          = {};
    AccelerationStructureTypeKHR        type          = AccelerationStructureTypeKHR::eTopLevel;
    DeviceAddress                       deviceAddress = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper type.
  template <>
  struct CppType<VkAccelerationStructureCreateInfoKHR>
  {
    using Type = AccelerationStructureCreateInfoKHR;
  };
#endif

  // Trait mapping the StructureType enumerant to the wrapper type that uses it as sType.
  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureCreateInfoKHR>
  {
    using Type = AccelerationStructureCreateInfoKHR;
  };

  // wrapper struct for struct VkGeometryTrianglesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeometryTrianglesNV.html
  // Layout-compatible C++ wrapper: member order and types mirror the C struct exactly,
  // which the reinterpret_cast based conversions below rely on.
  struct GeometryTrianglesNV
  {
    using NativeType = VkGeometryTrianglesNV;

    // structureType is the sType value identifying this struct; allowDuplicate presumably
    // indicates whether it may appear more than once in a structure chain (false here).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eGeometryTrianglesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every parameter is defaulted, so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( Buffer       vertexData_      = {},
                                              DeviceSize   vertexOffset_    = {},
                                              uint32_t     vertexCount_     = {},
                                              DeviceSize   vertexStride_    = {},
                                              Format       vertexFormat_    = Format::eUndefined,
                                              Buffer       indexData_       = {},
                                              DeviceSize   indexOffset_     = {},
                                              uint32_t     indexCount_      = {},
                                              IndexType    indexType_       = IndexType::eUint16,
                                              Buffer       transformData_   = {},
                                              DeviceSize   transformOffset_ = {},
                                              const void * pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , vertexData{ vertexData_ }
      , vertexOffset{ vertexOffset_ }
      , vertexCount{ vertexCount_ }
      , vertexStride{ vertexStride_ }
      , vertexFormat{ vertexFormat_ }
      , indexData{ indexData_ }
      , indexOffset{ indexOffset_ }
      , indexCount{ indexCount_ }
      , indexType{ indexType_ }
      , transformData{ transformData_ }
      , transformOffset{ transformOffset_ }
    {
    }

    VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct: copies via the layout-compatible wrapper view.
    GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryTrianglesNV( *reinterpret_cast<GeometryTrianglesNV const *>( &rhs ) )
    {
    }

    GeometryTrianglesNV & operator=( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (same layout-compatible reinterpretation as above).
    GeometryTrianglesNV & operator=( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<GeometryTrianglesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns a single member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexData( Buffer vertexData_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexData = vertexData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexOffset( DeviceSize vertexOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexOffset = vertexOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexCount = vertexCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexStride( DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexStride = vertexStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexFormat( Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexFormat = vertexFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexData( Buffer indexData_ ) VULKAN_HPP_NOEXCEPT
    {
      indexData = indexData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexOffset( DeviceSize indexOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      indexOffset = indexOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      indexCount = indexCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexType( IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
    {
      indexType = indexType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setTransformData( Buffer transformData_ ) VULKAN_HPP_NOEXCEPT
    {
      transformData = transformData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setTransformOffset( DeviceSize transformOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      transformOffset = transformOffset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const),
    // so this wrapper can be passed directly to the C API.
    operator VkGeometryTrianglesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGeometryTrianglesNV *>( this );
    }

    operator VkGeometryTrianglesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGeometryTrianglesNV *>( this );
    }

    operator VkGeometryTrianglesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkGeometryTrianglesNV *>( this );
    }

    operator VkGeometryTrianglesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkGeometryTrianglesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, enabling generic member-wise operations.
    std::tuple<StructureType const &,
               const void * const &,
               Buffer const &,
               DeviceSize const &,
               uint32_t const &,
               DeviceSize const &,
               Format const &,
               Buffer const &,
               DeviceSize const &,
               uint32_t const &,
               IndexType const &,
               Buffer const &,
               DeviceSize const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       vertexData,
                       vertexOffset,
                       vertexCount,
                       vertexStride,
                       vertexFormat,
                       indexData,
                       indexOffset,
                       indexCount,
                       indexType,
                       transformData,
                       transformOffset );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( GeometryTrianglesNV const & ) const = default;
#else
    // Member-wise equality; note that pNext is compared as a raw pointer value, not deep-compared.
    bool operator==( GeometryTrianglesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexData == rhs.vertexData ) && ( vertexOffset == rhs.vertexOffset ) &&
             ( vertexCount == rhs.vertexCount ) && ( vertexStride == rhs.vertexStride ) && ( vertexFormat == rhs.vertexFormat ) &&
             ( indexData == rhs.indexData ) && ( indexOffset == rhs.indexOffset ) && ( indexCount == rhs.indexCount ) && ( indexType == rhs.indexType ) &&
             ( transformData == rhs.transformData ) && ( transformOffset == rhs.transformOffset );
#  endif
    }

    bool operator!=( GeometryTrianglesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkGeometryTrianglesNV one-to-one; sType must remain eGeometryTrianglesNV.
    StructureType sType           = StructureType::eGeometryTrianglesNV;
    const void *  pNext           = {};
    Buffer        vertexData      = {};
    DeviceSize    vertexOffset    = {};
    uint32_t      vertexCount     = {};
    DeviceSize    vertexStride    = {};
    Format        vertexFormat    = Format::eUndefined;
    Buffer        indexData       = {};
    DeviceSize    indexOffset     = {};
    uint32_t      indexCount      = {};
    IndexType     indexType       = IndexType::eUint16;
    Buffer        transformData   = {};
    DeviceSize    transformOffset = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper type.
  template <>
  struct CppType<VkGeometryTrianglesNV>
  {
    using Type = GeometryTrianglesNV;
  };
#endif

  // Trait mapping the StructureType enumerant to the wrapper type that uses it as sType.
  template <>
  struct CppType<StructureType, StructureType::eGeometryTrianglesNV>
  {
    using Type = GeometryTrianglesNV;
  };

  // wrapper struct for struct VkGeometryAABBNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeometryAABBNV.html
  // Layout-compatible C++ wrapper: member order and types mirror the C struct exactly,
  // which the reinterpret_cast based conversions below rely on.
  struct GeometryAABBNV
  {
    using NativeType = VkGeometryAABBNV;

    // structureType is the sType value identifying this struct; allowDuplicate presumably
    // indicates whether it may appear more than once in a structure chain (false here).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eGeometryAabbNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every parameter is defaulted, so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR GeometryAABBNV(
      Buffer aabbData_ = {}, uint32_t numAABBs_ = {}, uint32_t stride_ = {}, DeviceSize offset_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , aabbData{ aabbData_ }
      , numAABBs{ numAABBs_ }
      , stride{ stride_ }
      , offset{ offset_ }
    {
    }

    VULKAN_HPP_CONSTEXPR GeometryAABBNV( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct: copies via the layout-compatible wrapper view.
    GeometryAABBNV( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryAABBNV( *reinterpret_cast<GeometryAABBNV const *>( &rhs ) ) {}

    GeometryAABBNV & operator=( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (same layout-compatible reinterpretation as above).
    GeometryAABBNV & operator=( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<GeometryAABBNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns a single member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setAabbData( Buffer aabbData_ ) VULKAN_HPP_NOEXCEPT
    {
      aabbData = aabbData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setNumAABBs( uint32_t numAABBs_ ) VULKAN_HPP_NOEXCEPT
    {
      numAABBs = numAABBs_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
    {
      stride = stride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setOffset( DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const),
    // so this wrapper can be passed directly to the C API.
    operator VkGeometryAABBNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGeometryAABBNV *>( this );
    }

    operator VkGeometryAABBNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGeometryAABBNV *>( this );
    }

    operator VkGeometryAABBNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkGeometryAABBNV *>( this );
    }

    operator VkGeometryAABBNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkGeometryAABBNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, enabling generic member-wise operations.
    std::tuple<StructureType const &, const void * const &, Buffer const &, uint32_t const &, uint32_t const &, DeviceSize const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, aabbData, numAABBs, stride, offset );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( GeometryAABBNV const & ) const = default;
#else
    // Member-wise equality; note that pNext is compared as a raw pointer value, not deep-compared.
    bool operator==( GeometryAABBNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( aabbData == rhs.aabbData ) && ( numAABBs == rhs.numAABBs ) && ( stride == rhs.stride ) &&
             ( offset == rhs.offset );
#  endif
    }

    bool operator!=( GeometryAABBNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkGeometryAABBNV one-to-one; sType must remain eGeometryAabbNV.
    StructureType sType    = StructureType::eGeometryAabbNV;
    const void *  pNext    = {};
    Buffer        aabbData = {};
    uint32_t      numAABBs = {};
    uint32_t      stride   = {};
    DeviceSize    offset   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper type.
  template <>
  struct CppType<VkGeometryAABBNV>
  {
    using Type = GeometryAABBNV;
  };
#endif

  // Trait mapping the StructureType enumerant to the wrapper type that uses it as sType.
  template <>
  struct CppType<StructureType, StructureType::eGeometryAabbNV>
  {
    using Type = GeometryAABBNV;
  };

  // wrapper struct for struct VkGeometryDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeometryDataNV.html
  // Layout-compatible C++ wrapper. Note: unlike most Vulkan structs, this one carries no
  // sType/pNext members — it is a plain payload holding triangle and AABB geometry data.
  struct GeometryDataNV
  {
    using NativeType = VkGeometryDataNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; both parameters are defaulted, so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR GeometryDataNV( GeometryTrianglesNV triangles_ = {}, GeometryAABBNV aabbs_ = {} ) VULKAN_HPP_NOEXCEPT
      : triangles{ triangles_ }
      , aabbs{ aabbs_ }
    {
    }

    VULKAN_HPP_CONSTEXPR GeometryDataNV( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct: copies via the layout-compatible wrapper view.
    GeometryDataNV( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryDataNV( *reinterpret_cast<GeometryDataNV const *>( &rhs ) ) {}

    GeometryDataNV & operator=( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (same layout-compatible reinterpretation as above).
    GeometryDataNV & operator=( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<GeometryDataNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns a single member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 GeometryDataNV & setTriangles( GeometryTrianglesNV const & triangles_ ) VULKAN_HPP_NOEXCEPT
    {
      triangles = triangles_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryDataNV & setAabbs( GeometryAABBNV const & aabbs_ ) VULKAN_HPP_NOEXCEPT
    {
      aabbs = aabbs_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const),
    // so this wrapper can be passed directly to the C API.
    operator VkGeometryDataNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGeometryDataNV *>( this );
    }

    operator VkGeometryDataNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGeometryDataNV *>( this );
    }

    operator VkGeometryDataNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkGeometryDataNV *>( this );
    }

    operator VkGeometryDataNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkGeometryDataNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, enabling generic member-wise operations.
    std::tuple<GeometryTrianglesNV const &, GeometryAABBNV const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( triangles, aabbs );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( GeometryDataNV const & ) const = default;
#else
    // Member-wise equality, delegating to the nested wrappers' own comparisons.
    bool operator==( GeometryDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( triangles == rhs.triangles ) && ( aabbs == rhs.aabbs );
#  endif
    }

    bool operator!=( GeometryDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkGeometryDataNV one-to-one.
    GeometryTrianglesNV triangles = {};
    GeometryAABBNV      aabbs     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper type.
  // (No StructureType specialization here: VkGeometryDataNV has no sType member.)
  template <>
  struct CppType<VkGeometryDataNV>
  {
    using Type = GeometryDataNV;
  };
#endif

  // wrapper struct for struct VkGeometryNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeometryNV.html
  // Layout-compatible C++ wrapper: member order and types mirror the C struct exactly,
  // which the reinterpret_cast based conversions below rely on.
  struct GeometryNV
  {
    using NativeType = VkGeometryNV;

    // structureType is the sType value identifying this struct; allowDuplicate presumably
    // indicates whether it may appear more than once in a structure chain (false here).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eGeometryNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every parameter is defaulted, so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR GeometryNV( GeometryTypeKHR  geometryType_ = GeometryTypeKHR::eTriangles,
                                     GeometryDataNV   geometry_     = {},
                                     GeometryFlagsKHR flags_        = {},
                                     const void *     pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , geometryType{ geometryType_ }
      , geometry{ geometry_ }
      , flags{ flags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR GeometryNV( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct: copies via the layout-compatible wrapper view.
    GeometryNV( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryNV( *reinterpret_cast<GeometryNV const *>( &rhs ) ) {}

    GeometryNV & operator=( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (same layout-compatible reinterpretation as above).
    GeometryNV & operator=( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<GeometryNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns a single member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 GeometryNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryNV & setGeometryType( GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryType = geometryType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryNV & setGeometry( GeometryDataNV const & geometry_ ) VULKAN_HPP_NOEXCEPT
    {
      geometry = geometry_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryNV & setFlags( GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const),
    // so this wrapper can be passed directly to the C API.
    operator VkGeometryNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGeometryNV *>( this );
    }

    operator VkGeometryNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGeometryNV *>( this );
    }

    operator VkGeometryNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkGeometryNV *>( this );
    }

    operator VkGeometryNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkGeometryNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, enabling generic member-wise operations.
    std::tuple<StructureType const &, const void * const &, GeometryTypeKHR const &, GeometryDataNV const &, GeometryFlagsKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, geometryType, geometry, flags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( GeometryNV const & ) const = default;
#else
    // Member-wise equality; note that pNext is compared as a raw pointer value, not deep-compared.
    bool operator==( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( geometryType == rhs.geometryType ) && ( geometry == rhs.geometry ) && ( flags == rhs.flags );
#  endif
    }

    bool operator!=( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkGeometryNV one-to-one; sType must remain eGeometryNV.
    StructureType    sType        = StructureType::eGeometryNV;
    const void *     pNext        = {};
    GeometryTypeKHR  geometryType = GeometryTypeKHR::eTriangles;
    GeometryDataNV   geometry     = {};
    GeometryFlagsKHR flags        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper type.
  template <>
  struct CppType<VkGeometryNV>
  {
    using Type = GeometryNV;
  };
#endif

  // Trait mapping the StructureType enumerant to the wrapper type that uses it as sType.
  template <>
  struct CppType<StructureType, StructureType::eGeometryNV>
  {
    using Type = GeometryNV;
  };

  // wrapper struct for struct VkAccelerationStructureInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureInfoNV.html
  // Layout-compatible C++ wrapper: member order and types mirror the C struct exactly,
  // which the reinterpret_cast based conversions below rely on. pGeometries is a non-owning
  // pointer paired with geometryCount.
  struct AccelerationStructureInfoNV
  {
    using NativeType = VkAccelerationStructureInfoNV;

    // structureType is the sType value identifying this struct; allowDuplicate presumably
    // indicates whether it may appear more than once in a structure chain (false here).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAccelerationStructureInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every parameter is defaulted, so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( AccelerationStructureTypeNV       type_          = {},
                                                      BuildAccelerationStructureFlagsNV flags_         = {},
                                                      uint32_t                          instanceCount_ = {},
                                                      uint32_t                          geometryCount_ = {},
                                                      const GeometryNV *                pGeometries_   = {},
                                                      const void *                      pNext_         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , type{ type_ }
      , flags{ flags_ }
      , instanceCount{ instanceCount_ }
      , geometryCount{ geometryCount_ }
      , pGeometries{ pGeometries_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct: copies via the layout-compatible wrapper view.
    AccelerationStructureInfoNV( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureInfoNV( *reinterpret_cast<AccelerationStructureInfoNV const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives geometryCount from the proxy's size and stores a
    // pointer into its data — the referenced geometry array must outlive this struct.
    AccelerationStructureInfoNV( AccelerationStructureTypeNV                       type_,
                                 BuildAccelerationStructureFlagsNV                 flags_,
                                 uint32_t                                          instanceCount_,
                                 ArrayProxyNoTemporaries<const GeometryNV> const & geometries_,
                                 const void *                                      pNext_ = nullptr )
      : pNext( pNext_ )
      , type( type_ )
      , flags( flags_ )
      , instanceCount( instanceCount_ )
      , geometryCount( static_cast<uint32_t>( geometries_.size() ) )
      , pGeometries( geometries_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    AccelerationStructureInfoNV & operator=( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (same layout-compatible reinterpretation as above).
    AccelerationStructureInfoNV & operator=( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AccelerationStructureInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns a single member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setType( AccelerationStructureTypeNV type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setFlags( BuildAccelerationStructureFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceCount = instanceCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryCount = geometryCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setPGeometries( const GeometryNV * pGeometries_ ) VULKAN_HPP_NOEXCEPT
    {
      pGeometries = pGeometries_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets geometryCount and pGeometries together from an array proxy;
    // the referenced geometry array must outlive this struct.
    AccelerationStructureInfoNV & setGeometries( ArrayProxyNoTemporaries<const GeometryNV> const & geometries_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryCount = static_cast<uint32_t>( geometries_.size() );
      pGeometries   = geometries_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const),
    // so this wrapper can be passed directly to the C API.
    operator VkAccelerationStructureInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureInfoNV *>( this );
    }

    operator VkAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureInfoNV *>( this );
    }

    operator VkAccelerationStructureInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAccelerationStructureInfoNV *>( this );
    }

    operator VkAccelerationStructureInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAccelerationStructureInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, enabling generic member-wise operations.
    std::tuple<StructureType const &,
               const void * const &,
               AccelerationStructureTypeNV const &,
               BuildAccelerationStructureFlagsNV const &,
               uint32_t const &,
               uint32_t const &,
               const GeometryNV * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, type, flags, instanceCount, geometryCount, pGeometries );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AccelerationStructureInfoNV const & ) const = default;
#else
    // Member-wise equality; note that pNext and pGeometries are compared as raw pointer
    // values — the pointed-to geometry array contents are not compared.
    bool operator==( AccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( flags == rhs.flags ) && ( instanceCount == rhs.instanceCount ) &&
             ( geometryCount == rhs.geometryCount ) && ( pGeometries == rhs.pGeometries );
#  endif
    }

    bool operator!=( AccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkAccelerationStructureInfoNV one-to-one; sType must remain eAccelerationStructureInfoNV.
    StructureType                     sType         = StructureType::eAccelerationStructureInfoNV;
    const void *                      pNext         = {};
    AccelerationStructureTypeNV       type          = {};
    BuildAccelerationStructureFlagsNV flags         = {};
    uint32_t                          instanceCount = {};
    uint32_t                          geometryCount = {};
    const GeometryNV *                pGeometries   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper type.
  template <>
  struct CppType<VkAccelerationStructureInfoNV>
  {
    using Type = AccelerationStructureInfoNV;
  };
#endif

  // Trait mapping the StructureType enumerant to the wrapper type that uses it as sType.
  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureInfoNV>
  {
    using Type = AccelerationStructureInfoNV;
  };

  // wrapper struct for struct VkAccelerationStructureCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureCreateInfoNV.html
  // Layout-compatible C++ wrapper: member order and types mirror the C struct exactly,
  // which the reinterpret_cast based conversions below rely on.
  struct AccelerationStructureCreateInfoNV
  {
    using NativeType = VkAccelerationStructureCreateInfoNV;

    // structureType is the sType value identifying this struct; allowDuplicate presumably
    // indicates whether it may appear more than once in a structure chain (false here).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAccelerationStructureCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every parameter is defaulted, so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( DeviceSize                  compactedSize_ = {},
                                                            AccelerationStructureInfoNV info_          = {},
                                                            const void *                pNext_         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , compactedSize{ compactedSize_ }
      , info{ info_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct: copies via the layout-compatible wrapper view.
    AccelerationStructureCreateInfoNV( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureCreateInfoNV( *reinterpret_cast<AccelerationStructureCreateInfoNV const *>( &rhs ) )
    {
    }

    AccelerationStructureCreateInfoNV & operator=( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (same layout-compatible reinterpretation as above).
    AccelerationStructureCreateInfoNV & operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AccelerationStructureCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns a single member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & setCompactedSize( DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT
    {
      compactedSize = compactedSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & setInfo( AccelerationStructureInfoNV const & info_ ) VULKAN_HPP_NOEXCEPT
    {
      info = info_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const),
    // so this wrapper can be passed directly to the C API.
    operator VkAccelerationStructureCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( this );
    }

    operator VkAccelerationStructureCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureCreateInfoNV *>( this );
    }

    operator VkAccelerationStructureCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( this );
    }

    operator VkAccelerationStructureCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAccelerationStructureCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, enabling generic member-wise operations.
    std::tuple<StructureType const &, const void * const &, DeviceSize const &, AccelerationStructureInfoNV const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, compactedSize, info );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AccelerationStructureCreateInfoNV const & ) const = default;
#else
    // Member-wise equality; note that pNext is compared as a raw pointer value, not deep-compared.
    bool operator==( AccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compactedSize == rhs.compactedSize ) && ( info == rhs.info );
#  endif
    }

    bool operator!=( AccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkAccelerationStructureCreateInfoNV one-to-one; sType must remain eAccelerationStructureCreateInfoNV.
    StructureType               sType         = StructureType::eAccelerationStructureCreateInfoNV;
    const void *                pNext         = {};
    DeviceSize                  compactedSize = {};
    AccelerationStructureInfoNV info          = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct to its C++ wrapper; lets C++20 generic code recover the wrapper type via CppType<T>::Type.
  template <>
  struct CppType<VkAccelerationStructureCreateInfoNV>
  {
    using Type = AccelerationStructureCreateInfoNV;
  };
#endif

  // Trait mapping the StructureType enumerant back to its wrapper struct (used by generic structure-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureCreateInfoNV>
  {
    using Type = AccelerationStructureCreateInfoNV;
  };

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  // wrapper struct for struct VkAccelerationStructureDenseGeometryFormatTrianglesDataAMDX, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureDenseGeometryFormatTrianglesDataAMDX.html
  struct AccelerationStructureDenseGeometryFormatTrianglesDataAMDX
  {
    using NativeType = VkAccelerationStructureDenseGeometryFormatTrianglesDataAMDX;

    // allowDuplicate == false: presumably the structure-chain helpers reject a repeated instance in a pNext chain — TODO confirm against the chain validation code.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAccelerationStructureDenseGeometryFormatTrianglesDataAMDX;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by the member initializer below; pNext_ is listed last so all payload fields can be set positionally.
    VULKAN_HPP_CONSTEXPR_14
      AccelerationStructureDenseGeometryFormatTrianglesDataAMDX( DeviceOrHostAddressConstKHR  compressedData_    = {},
                                                                 DeviceSize                   dataSize_          = {},
                                                                 uint32_t                     numTriangles_      = {},
                                                                 uint32_t                     numVertices_       = {},
                                                                 uint32_t                     maxPrimitiveIndex_ = {},
                                                                 uint32_t                     maxGeometryIndex_  = {},
                                                                 CompressedTriangleFormatAMDX format_            = CompressedTriangleFormatAMDX::eDgf1,
                                                                 const void *                 pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , compressedData{ compressedData_ }
      , dataSize{ dataSize_ }
      , numTriangles{ numTriangles_ }
      , numVertices{ numVertices_ }
      , maxPrimitiveIndex{ maxPrimitiveIndex_ }
      , maxGeometryIndex{ maxGeometryIndex_ }
      , format{ format_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX( AccelerationStructureDenseGeometryFormatTrianglesDataAMDX const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the cast is valid because wrapper and C struct are layout-compatible (see member list below).
    AccelerationStructureDenseGeometryFormatTrianglesDataAMDX( VkAccelerationStructureDenseGeometryFormatTrianglesDataAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureDenseGeometryFormatTrianglesDataAMDX(
          *reinterpret_cast<AccelerationStructureDenseGeometryFormatTrianglesDataAMDX const *>( &rhs ) )
    {
    }

    AccelerationStructureDenseGeometryFormatTrianglesDataAMDX &
      operator=( AccelerationStructureDenseGeometryFormatTrianglesDataAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility argument as the converting constructor).
    AccelerationStructureDenseGeometryFormatTrianglesDataAMDX &
      operator=( VkAccelerationStructureDenseGeometryFormatTrianglesDataAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AccelerationStructureDenseGeometryFormatTrianglesDataAMDX const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX &
      setCompressedData( DeviceOrHostAddressConstKHR const & compressedData_ ) VULKAN_HPP_NOEXCEPT
    {
      compressedData = compressedData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX & setDataSize( DeviceSize dataSize_ ) VULKAN_HPP_NOEXCEPT
    {
      dataSize = dataSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX & setNumTriangles( uint32_t numTriangles_ ) VULKAN_HPP_NOEXCEPT
    {
      numTriangles = numTriangles_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX & setNumVertices( uint32_t numVertices_ ) VULKAN_HPP_NOEXCEPT
    {
      numVertices = numVertices_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX & setMaxPrimitiveIndex( uint32_t maxPrimitiveIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      maxPrimitiveIndex = maxPrimitiveIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX & setMaxGeometryIndex( uint32_t maxGeometryIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      maxGeometryIndex = maxGeometryIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX & setFormat( CompressedTriangleFormatAMDX format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer, so the wrapper can be passed straight to the C API.
    operator VkAccelerationStructureDenseGeometryFormatTrianglesDataAMDX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureDenseGeometryFormatTrianglesDataAMDX *>( this );
    }

    operator VkAccelerationStructureDenseGeometryFormatTrianglesDataAMDX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureDenseGeometryFormatTrianglesDataAMDX *>( this );
    }

    operator VkAccelerationStructureDenseGeometryFormatTrianglesDataAMDX const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAccelerationStructureDenseGeometryFormatTrianglesDataAMDX *>( this );
    }

    operator VkAccelerationStructureDenseGeometryFormatTrianglesDataAMDX *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAccelerationStructureDenseGeometryFormatTrianglesDataAMDX *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // reflect(): a tuple of const references over every member, in declaration order, for generic tuple-based processing.
    std::tuple<StructureType const &,
               const void * const &,
               DeviceOrHostAddressConstKHR const &,
               DeviceSize const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               CompressedTriangleFormatAMDX const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, compressedData, dataSize, numTriangles, numVertices, maxPrimitiveIndex, maxGeometryIndex, format );
    }
#  endif

    // NOTE(review): unlike most wrappers in this file, no operator==/<=> is provided — presumably because
    // DeviceOrHostAddressConstKHR is a union and memberwise comparison would be ill-defined; confirm with the generator.

    // Members mirror VkAccelerationStructureDenseGeometryFormatTrianglesDataAMDX exactly (order and types);
    // do not reorder, or the reinterpret_cast conversions above become invalid.
  public:
    StructureType                sType             = StructureType::eAccelerationStructureDenseGeometryFormatTrianglesDataAMDX;
    const void *                 pNext             = {};
    DeviceOrHostAddressConstKHR  compressedData    = {};
    DeviceSize                   dataSize          = {};
    uint32_t                     numTriangles      = {};
    uint32_t                     numVertices       = {};
    uint32_t                     maxPrimitiveIndex = {};
    uint32_t                     maxGeometryIndex  = {};
    CompressedTriangleFormatAMDX format            = CompressedTriangleFormatAMDX::eDgf1;
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct to its C++ wrapper; lets C++20 generic code recover the wrapper type via CppType<T>::Type.
  template <>
  struct CppType<VkAccelerationStructureDenseGeometryFormatTrianglesDataAMDX>
  {
    using Type = AccelerationStructureDenseGeometryFormatTrianglesDataAMDX;
  };
#  endif

  // Trait mapping the StructureType enumerant back to its wrapper struct (used by generic structure-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureDenseGeometryFormatTrianglesDataAMDX>
  {
    using Type = AccelerationStructureDenseGeometryFormatTrianglesDataAMDX;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  // wrapper struct for struct VkAccelerationStructureDeviceAddressInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureDeviceAddressInfoKHR.html
  struct AccelerationStructureDeviceAddressInfoKHR
  {
    using NativeType = VkAccelerationStructureDeviceAddressInfoKHR;

    // allowDuplicate == false: presumably the structure-chain helpers reject a repeated instance in a pNext chain — TODO confirm against the chain validation code.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAccelerationStructureDeviceAddressInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by the member initializer below; pNext_ is listed last so the payload field can be set positionally.
    VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( AccelerationStructureKHR accelerationStructure_ = {},
                                                                    const void *             pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , accelerationStructure{ accelerationStructure_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the cast is valid because wrapper and C struct are layout-compatible (see member list below).
    AccelerationStructureDeviceAddressInfoKHR( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureDeviceAddressInfoKHR( *reinterpret_cast<AccelerationStructureDeviceAddressInfoKHR const *>( &rhs ) )
    {
    }

    AccelerationStructureDeviceAddressInfoKHR & operator=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility argument as the converting constructor).
    AccelerationStructureDeviceAddressInfoKHR & operator=( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AccelerationStructureDeviceAddressInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR &
      setAccelerationStructure( AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructure = accelerationStructure_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer, so the wrapper can be passed straight to the C API.
    operator VkAccelerationStructureDeviceAddressInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( this );
    }

    operator VkAccelerationStructureDeviceAddressInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureDeviceAddressInfoKHR *>( this );
    }

    operator VkAccelerationStructureDeviceAddressInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( this );
    }

    operator VkAccelerationStructureDeviceAddressInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAccelerationStructureDeviceAddressInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // reflect(): a tuple of const references over every member, in declaration order, for generic tuple-based processing.
    std::tuple<StructureType const &, const void * const &, AccelerationStructureKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, accelerationStructure );
    }
#endif

    // Equality: defaulted <=> when available, otherwise memberwise == (via reflect() if enabled) plus derived !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AccelerationStructureDeviceAddressInfoKHR const & ) const = default;
#else
    bool operator==( AccelerationStructureDeviceAddressInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure );
#  endif
    }

    bool operator!=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    // Members mirror VkAccelerationStructureDeviceAddressInfoKHR exactly (order and types);
    // do not reorder, or the reinterpret_cast conversions above become invalid.
  public:
    StructureType            sType                 = StructureType::eAccelerationStructureDeviceAddressInfoKHR;
    const void *             pNext                 = {};
    AccelerationStructureKHR accelerationStructure = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct to its C++ wrapper; lets C++20 generic code recover the wrapper type via CppType<T>::Type.
  template <>
  struct CppType<VkAccelerationStructureDeviceAddressInfoKHR>
  {
    using Type = AccelerationStructureDeviceAddressInfoKHR;
  };
#endif

  // Trait mapping the StructureType enumerant back to its wrapper struct (used by generic structure-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureDeviceAddressInfoKHR>
  {
    using Type = AccelerationStructureDeviceAddressInfoKHR;
  };

  // wrapper struct for struct VkAccelerationStructureGeometryLinearSweptSpheresDataNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometryLinearSweptSpheresDataNV.html
  struct AccelerationStructureGeometryLinearSweptSpheresDataNV
  {
    using NativeType = VkAccelerationStructureGeometryLinearSweptSpheresDataNV;

    // allowDuplicate == false: presumably the structure-chain helpers reject a repeated instance in a pNext chain — TODO confirm against the chain validation code.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAccelerationStructureGeometryLinearSweptSpheresDataNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by the member initializer below; pNext_ is listed last so all payload fields can be set positionally.
    VULKAN_HPP_CONSTEXPR_14
      AccelerationStructureGeometryLinearSweptSpheresDataNV( Format                              vertexFormat_ = Format::eUndefined,
                                                             DeviceOrHostAddressConstKHR         vertexData_   = {},
                                                             DeviceSize                          vertexStride_ = {},
                                                             Format                              radiusFormat_ = Format::eUndefined,
                                                             DeviceOrHostAddressConstKHR         radiusData_   = {},
                                                             DeviceSize                          radiusStride_ = {},
                                                             IndexType                           indexType_    = IndexType::eUint16,
                                                             DeviceOrHostAddressConstKHR         indexData_    = {},
                                                             DeviceSize                          indexStride_  = {},
                                                             RayTracingLssIndexingModeNV         indexingMode_ = RayTracingLssIndexingModeNV::eList,
                                                             RayTracingLssPrimitiveEndCapsModeNV endCapsMode_  = RayTracingLssPrimitiveEndCapsModeNV::eNone,
                                                             const void *                        pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , vertexFormat{ vertexFormat_ }
      , vertexData{ vertexData_ }
      , vertexStride{ vertexStride_ }
      , radiusFormat{ radiusFormat_ }
      , radiusData{ radiusData_ }
      , radiusStride{ radiusStride_ }
      , indexType{ indexType_ }
      , indexData{ indexData_ }
      , indexStride{ indexStride_ }
      , indexingMode{ indexingMode_ }
      , endCapsMode{ endCapsMode_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14
      AccelerationStructureGeometryLinearSweptSpheresDataNV( AccelerationStructureGeometryLinearSweptSpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the cast is valid because wrapper and C struct are layout-compatible (see member list below).
    AccelerationStructureGeometryLinearSweptSpheresDataNV( VkAccelerationStructureGeometryLinearSweptSpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureGeometryLinearSweptSpheresDataNV( *reinterpret_cast<AccelerationStructureGeometryLinearSweptSpheresDataNV const *>( &rhs ) )
    {
    }

    AccelerationStructureGeometryLinearSweptSpheresDataNV &
      operator=( AccelerationStructureGeometryLinearSweptSpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility argument as the converting constructor).
    AccelerationStructureGeometryLinearSweptSpheresDataNV & operator=( VkAccelerationStructureGeometryLinearSweptSpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AccelerationStructureGeometryLinearSweptSpheresDataNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setVertexFormat( Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexFormat = vertexFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV &
      setVertexData( DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexData = vertexData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setVertexStride( DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexStride = vertexStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setRadiusFormat( Format radiusFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      radiusFormat = radiusFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV &
      setRadiusData( DeviceOrHostAddressConstKHR const & radiusData_ ) VULKAN_HPP_NOEXCEPT
    {
      radiusData = radiusData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setRadiusStride( DeviceSize radiusStride_ ) VULKAN_HPP_NOEXCEPT
    {
      radiusStride = radiusStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setIndexType( IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
    {
      indexType = indexType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV &
      setIndexData( DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT
    {
      indexData = indexData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setIndexStride( DeviceSize indexStride_ ) VULKAN_HPP_NOEXCEPT
    {
      indexStride = indexStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV &
      setIndexingMode( RayTracingLssIndexingModeNV indexingMode_ ) VULKAN_HPP_NOEXCEPT
    {
      indexingMode = indexingMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV &
      setEndCapsMode( RayTracingLssPrimitiveEndCapsModeNV endCapsMode_ ) VULKAN_HPP_NOEXCEPT
    {
      endCapsMode = endCapsMode_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer, so the wrapper can be passed straight to the C API.
    operator VkAccelerationStructureGeometryLinearSweptSpheresDataNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureGeometryLinearSweptSpheresDataNV *>( this );
    }

    operator VkAccelerationStructureGeometryLinearSweptSpheresDataNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureGeometryLinearSweptSpheresDataNV *>( this );
    }

    operator VkAccelerationStructureGeometryLinearSweptSpheresDataNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAccelerationStructureGeometryLinearSweptSpheresDataNV *>( this );
    }

    operator VkAccelerationStructureGeometryLinearSweptSpheresDataNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAccelerationStructureGeometryLinearSweptSpheresDataNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // reflect(): a tuple of const references over every member, in declaration order, for generic tuple-based processing.
    std::tuple<StructureType const &,
               const void * const &,
               Format const &,
               DeviceOrHostAddressConstKHR const &,
               DeviceSize const &,
               Format const &,
               DeviceOrHostAddressConstKHR const &,
               DeviceSize const &,
               IndexType const &,
               DeviceOrHostAddressConstKHR const &,
               DeviceSize const &,
               RayTracingLssIndexingModeNV const &,
               RayTracingLssPrimitiveEndCapsModeNV const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       vertexFormat,
                       vertexData,
                       vertexStride,
                       radiusFormat,
                       radiusData,
                       radiusStride,
                       indexType,
                       indexData,
                       indexStride,
                       indexingMode,
                       endCapsMode );
    }
#endif

    // NOTE(review): unlike most wrappers in this file, no operator==/<=> is provided — presumably because
    // DeviceOrHostAddressConstKHR is a union and memberwise comparison would be ill-defined; confirm with the generator.

    // Members mirror VkAccelerationStructureGeometryLinearSweptSpheresDataNV exactly (order and types);
    // do not reorder, or the reinterpret_cast conversions above become invalid.
  public:
    StructureType                       sType        = StructureType::eAccelerationStructureGeometryLinearSweptSpheresDataNV;
    const void *                        pNext        = {};
    Format                              vertexFormat = Format::eUndefined;
    DeviceOrHostAddressConstKHR         vertexData   = {};
    DeviceSize                          vertexStride = {};
    Format                              radiusFormat = Format::eUndefined;
    DeviceOrHostAddressConstKHR         radiusData   = {};
    DeviceSize                          radiusStride = {};
    IndexType                           indexType    = IndexType::eUint16;
    DeviceOrHostAddressConstKHR         indexData    = {};
    DeviceSize                          indexStride  = {};
    RayTracingLssIndexingModeNV         indexingMode = RayTracingLssIndexingModeNV::eList;
    RayTracingLssPrimitiveEndCapsModeNV endCapsMode  = RayTracingLssPrimitiveEndCapsModeNV::eNone;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct to its C++ wrapper; lets C++20 generic code recover the wrapper type via CppType<T>::Type.
  template <>
  struct CppType<VkAccelerationStructureGeometryLinearSweptSpheresDataNV>
  {
    using Type = AccelerationStructureGeometryLinearSweptSpheresDataNV;
  };
#endif

  // Trait mapping the StructureType enumerant back to its wrapper struct (used by generic structure-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureGeometryLinearSweptSpheresDataNV>
  {
    using Type = AccelerationStructureGeometryLinearSweptSpheresDataNV;
  };

  // wrapper struct for struct VkAccelerationStructureGeometryMotionTrianglesDataNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometryMotionTrianglesDataNV.html
  struct AccelerationStructureGeometryMotionTrianglesDataNV
  {
    using NativeType = VkAccelerationStructureGeometryMotionTrianglesDataNV;

    // allowDuplicate == false: presumably the structure-chain helpers reject a repeated instance in a pNext chain — TODO confirm against the chain validation code.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by the member initializer below; pNext_ is listed last so the payload field can be set positionally.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV( DeviceOrHostAddressConstKHR vertexData_ = {},
                                                                                const void *                pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , vertexData{ vertexData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14
      AccelerationStructureGeometryMotionTrianglesDataNV( AccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the cast is valid because wrapper and C struct are layout-compatible (see member list below).
    AccelerationStructureGeometryMotionTrianglesDataNV( VkAccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureGeometryMotionTrianglesDataNV( *reinterpret_cast<AccelerationStructureGeometryMotionTrianglesDataNV const *>( &rhs ) )
    {
    }

    AccelerationStructureGeometryMotionTrianglesDataNV &
      operator=( AccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility argument as the converting constructor).
    AccelerationStructureGeometryMotionTrianglesDataNV & operator=( VkAccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AccelerationStructureGeometryMotionTrianglesDataNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV &
      setVertexData( DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexData = vertexData_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer, so the wrapper can be passed straight to the C API.
    operator VkAccelerationStructureGeometryMotionTrianglesDataNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureGeometryMotionTrianglesDataNV *>( this );
    }

    operator VkAccelerationStructureGeometryMotionTrianglesDataNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureGeometryMotionTrianglesDataNV *>( this );
    }

    operator VkAccelerationStructureGeometryMotionTrianglesDataNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAccelerationStructureGeometryMotionTrianglesDataNV *>( this );
    }

    operator VkAccelerationStructureGeometryMotionTrianglesDataNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAccelerationStructureGeometryMotionTrianglesDataNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // reflect(): a tuple of const references over every member, in declaration order, for generic tuple-based processing.
    std::tuple<StructureType const &, const void * const &, DeviceOrHostAddressConstKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, vertexData );
    }
#endif

    // NOTE(review): unlike most wrappers in this file, no operator==/<=> is provided — presumably because
    // DeviceOrHostAddressConstKHR is a union and memberwise comparison would be ill-defined; confirm with the generator.

    // Members mirror VkAccelerationStructureGeometryMotionTrianglesDataNV exactly (order and types);
    // do not reorder, or the reinterpret_cast conversions above become invalid.
  public:
    StructureType               sType      = StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV;
    const void *                pNext      = {};
    DeviceOrHostAddressConstKHR vertexData = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct to its C++ wrapper; lets C++20 generic code recover the wrapper type via CppType<T>::Type.
  template <>
  struct CppType<VkAccelerationStructureGeometryMotionTrianglesDataNV>
  {
    using Type = AccelerationStructureGeometryMotionTrianglesDataNV;
  };
#endif

  // Trait mapping the StructureType enumerant back to its wrapper struct (used by generic structure-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV>
  {
    using Type = AccelerationStructureGeometryMotionTrianglesDataNV;
  };

  // wrapper struct for struct VkAccelerationStructureGeometrySpheresDataNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometrySpheresDataNV.html
  struct AccelerationStructureGeometrySpheresDataNV
  {
    using NativeType = VkAccelerationStructureGeometrySpheresDataNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAccelerationStructureGeometrySpheresDataNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV( Format                      vertexFormat_ = Format::eUndefined,
                                                                        DeviceOrHostAddressConstKHR vertexData_   = {},
                                                                        DeviceSize                  vertexStride_ = {},
                                                                        Format                      radiusFormat_ = Format::eUndefined,
                                                                        DeviceOrHostAddressConstKHR radiusData_   = {},
                                                                        DeviceSize                  radiusStride_ = {},
                                                                        IndexType                   indexType_    = IndexType::eUint16,
                                                                        DeviceOrHostAddressConstKHR indexData_    = {},
                                                                        DeviceSize                  indexStride_  = {},
                                                                        const void *                pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , vertexFormat{ vertexFormat_ }
      , vertexData{ vertexData_ }
      , vertexStride{ vertexStride_ }
      , radiusFormat{ radiusFormat_ }
      , radiusData{ radiusData_ }
      , radiusStride{ radiusStride_ }
      , indexType{ indexType_ }
      , indexData{ indexData_ }
      , indexStride{ indexStride_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV( AccelerationStructureGeometrySpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureGeometrySpheresDataNV( VkAccelerationStructureGeometrySpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureGeometrySpheresDataNV( *reinterpret_cast<AccelerationStructureGeometrySpheresDataNV const *>( &rhs ) )
    {
    }

    AccelerationStructureGeometrySpheresDataNV & operator=( AccelerationStructureGeometrySpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    AccelerationStructureGeometrySpheresDataNV & operator=( VkAccelerationStructureGeometrySpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AccelerationStructureGeometrySpheresDataNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setVertexFormat( Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexFormat = vertexFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setVertexData( DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexData = vertexData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setVertexStride( DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexStride = vertexStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setRadiusFormat( Format radiusFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      radiusFormat = radiusFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setRadiusData( DeviceOrHostAddressConstKHR const & radiusData_ ) VULKAN_HPP_NOEXCEPT
    {
      radiusData = radiusData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setRadiusStride( DeviceSize radiusStride_ ) VULKAN_HPP_NOEXCEPT
    {
      radiusStride = radiusStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setIndexType( IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
    {
      indexType = indexType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setIndexData( DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT
    {
      indexData = indexData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setIndexStride( DeviceSize indexStride_ ) VULKAN_HPP_NOEXCEPT
    {
      indexStride = indexStride_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the corresponding C type.  These rely on the wrapper being
    // layout-compatible with VkAccelerationStructureGeometrySpheresDataNV, so the
    // reinterpret_cast yields a valid view of this same object.
    operator VkAccelerationStructureGeometrySpheresDataNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureGeometrySpheresDataNV *>( this );
    }

    operator VkAccelerationStructureGeometrySpheresDataNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureGeometrySpheresDataNV *>( this );
    }

    operator VkAccelerationStructureGeometrySpheresDataNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAccelerationStructureGeometrySpheresDataNV *>( this );
    }

    operator VkAccelerationStructureGeometrySpheresDataNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAccelerationStructureGeometrySpheresDataNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns const references to all members, in declaration order, for generic
    // tuple-based comparison and iteration.
    std::tuple<StructureType const &,
               const void * const &,
               Format const &,
               DeviceOrHostAddressConstKHR const &,
               DeviceSize const &,
               Format const &,
               DeviceOrHostAddressConstKHR const &,
               DeviceSize const &,
               IndexType const &,
               DeviceOrHostAddressConstKHR const &,
               DeviceSize const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, vertexFormat, vertexData, vertexStride, radiusFormat, radiusData, radiusStride, indexType, indexData, indexStride );
    }
#endif

  public:
    // Members mirror VkAccelerationStructureGeometrySpheresDataNV in the C struct's declaration order.
    StructureType               sType        = StructureType::eAccelerationStructureGeometrySpheresDataNV;
    const void *                pNext        = {};
    Format                      vertexFormat = Format::eUndefined;
    DeviceOrHostAddressConstKHR vertexData   = {};
    DeviceSize                  vertexStride = {};
    Format                      radiusFormat = Format::eUndefined;
    DeviceOrHostAddressConstKHR radiusData   = {};
    DeviceSize                  radiusStride = {};
    IndexType                   indexType    = IndexType::eUint16;
    DeviceOrHostAddressConstKHR indexData    = {};
    DeviceSize                  indexStride  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to this C++ wrapper for template metaprogramming (C++20 builds only).
  template <>
  struct CppType<VkAccelerationStructureGeometrySpheresDataNV>
  {
    using Type = AccelerationStructureGeometrySpheresDataNV;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureGeometrySpheresDataNV>
  {
    using Type = AccelerationStructureGeometrySpheresDataNV;
  };

  // wrapper struct for struct VkTransformMatrixKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTransformMatrixKHR.html
  // Wraps a 3x4 float matrix; layout-compatible with the C struct, so the conversion
  // operators below can reinterpret_cast in place.
  struct TransformMatrixKHR
  {
    using NativeType = VkTransformMatrixKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Constructs from a 3x4 array; defaults to an all-zero matrix.
    VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( std::array<std::array<float, 4>, 3> const & matrix_ = {} ) VULKAN_HPP_NOEXCEPT : matrix{ matrix_ } {}

    VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Constructs from the C type by reinterpreting it as the layout-compatible wrapper.
    TransformMatrixKHR( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT : TransformMatrixKHR( *reinterpret_cast<TransformMatrixKHR const *>( &rhs ) ) {}

    TransformMatrixKHR & operator=( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assigns from the C type (same reinterpret_cast aliasing as above).
    TransformMatrixKHR & operator=( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<TransformMatrixKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setter; returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR & setMatrix( std::array<std::array<float, 4>, 3> matrix_ ) VULKAN_HPP_NOEXCEPT
    {
      matrix = matrix_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type (reference and pointer, const and non-const).
    operator VkTransformMatrixKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkTransformMatrixKHR *>( this );
    }

    operator VkTransformMatrixKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkTransformMatrixKHR *>( this );
    }

    operator VkTransformMatrixKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkTransformMatrixKHR *>( this );
    }

    operator VkTransformMatrixKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkTransformMatrixKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns const references to all members for generic tuple-based comparison.
    std::tuple<ArrayWrapper2D<float, 3, 4> const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( matrix );
    }
#endif

    // Comparison: defaulted <=> when available, otherwise hand-written ==/!=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( TransformMatrixKHR const & ) const = default;
#else
    bool operator==( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( matrix == rhs.matrix );
#  endif
    }

    bool operator!=( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // 3 rows x 4 columns of float, zero-initialized by default.
    ArrayWrapper2D<float, 3, 4> matrix = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to this C++ wrapper for template metaprogramming (C++20 builds only).
  template <>
  struct CppType<VkTransformMatrixKHR>
  {
    using Type = TransformMatrixKHR;
  };
#endif
  // The NV alias refers to the identical KHR type.
  using TransformMatrixNV = TransformMatrixKHR;

  // wrapper struct for struct VkAccelerationStructureInstanceKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureInstanceKHR.html
  // NOTE: instanceCustomIndex/mask and instanceShaderBindingTableRecordOffset/flags are
  // 24-/8-bit bit-fields, matching the packed layout of the C struct.
  struct AccelerationStructureInstanceKHR
  {
    using NativeType = VkAccelerationStructureInstanceKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor.  Values assigned to the bit-field members are truncated
    // to their declared widths (24 or 8 bits).
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR( TransformMatrixKHR       transform_                              = {},
                                                              uint32_t                 instanceCustomIndex_                    = {},
                                                              uint32_t                 mask_                                   = {},
                                                              uint32_t                 instanceShaderBindingTableRecordOffset_ = {},
                                                              GeometryInstanceFlagsKHR flags_                                  = {},
                                                              uint64_t                 accelerationStructureReference_         = {} ) VULKAN_HPP_NOEXCEPT
      : transform{ transform_ }
      , instanceCustomIndex{ instanceCustomIndex_ }
      , mask{ mask_ }
      , instanceShaderBindingTableRecordOffset{ instanceShaderBindingTableRecordOffset_ }
      , flags{ flags_ }
      , accelerationStructureReference{ accelerationStructureReference_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Constructs from the C type by reinterpreting it as the layout-compatible wrapper.
    AccelerationStructureInstanceKHR( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureInstanceKHR( *reinterpret_cast<AccelerationStructureInstanceKHR const *>( &rhs ) )
    {
    }

    AccelerationStructureInstanceKHR & operator=( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assigns from the C type (same reinterpret_cast aliasing as above).
    AccelerationStructureInstanceKHR & operator=( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AccelerationStructureInstanceKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setTransform( TransformMatrixKHR const & transform_ ) VULKAN_HPP_NOEXCEPT
    {
      transform = transform_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceCustomIndex = instanceCustomIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT
    {
      mask = mask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR &
      setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_;
      return *this;
    }

    // Not constexpr: converts the C++ flags wrapper to the raw Vk bitmask (the member is
    // a bit-field of the C type) via reinterpret_cast.
    AccelerationStructureInstanceKHR & setFlags( GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = *reinterpret_cast<VkGeometryInstanceFlagsKHR *>( &flags_ );
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureReference = accelerationStructureReference_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type (reference and pointer, const and non-const).
    operator VkAccelerationStructureInstanceKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureInstanceKHR *>( this );
    }

    operator VkAccelerationStructureInstanceKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureInstanceKHR *>( this );
    }

    operator VkAccelerationStructureInstanceKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAccelerationStructureInstanceKHR *>( this );
    }

    operator VkAccelerationStructureInstanceKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAccelerationStructureInstanceKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns const references to all members for generic tuple-based comparison.
    std::tuple<TransformMatrixKHR const &, uint32_t const &, uint32_t const &, uint32_t const &, VkGeometryInstanceFlagsKHR const &, uint64_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( transform, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference );
    }
#endif

    // Comparison: defaulted <=> when available, otherwise hand-written ==/!=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AccelerationStructureInstanceKHR const & ) const = default;
#else
    bool operator==( AccelerationStructureInstanceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( transform == rhs.transform ) && ( instanceCustomIndex == rhs.instanceCustomIndex ) && ( mask == rhs.mask ) &&
             ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) && ( flags == rhs.flags ) &&
             ( accelerationStructureReference == rhs.accelerationStructureReference );
#  endif
    }

    bool operator!=( AccelerationStructureInstanceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    TransformMatrixKHR         transform = {};
    // Bit-fields: no default member initializers (they are set by the constructor above,
    // when constructors are enabled).  flags is stored as the raw Vk type so it can be a
    // bit-field; use setFlags to assign from the C++ flags wrapper.
    uint32_t                   instanceCustomIndex                    : 24;
    uint32_t                   mask                                   : 8;
    uint32_t                   instanceShaderBindingTableRecordOffset : 24;
    VkGeometryInstanceFlagsKHR flags                                  : 8;
    uint64_t                   accelerationStructureReference = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to this C++ wrapper for template metaprogramming (C++20 builds only).
  template <>
  struct CppType<VkAccelerationStructureInstanceKHR>
  {
    using Type = AccelerationStructureInstanceKHR;
  };
#endif
  // The NV alias refers to the identical KHR type.
  using AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR;

  // wrapper struct for struct VkAccelerationStructureMatrixMotionInstanceNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureMatrixMotionInstanceNV.html
  // Like AccelerationStructureInstanceKHR, but with two transforms (transformT0 / transformT1).
  // The four middle members are 24-/8-bit bit-fields matching the packed C layout.
  struct AccelerationStructureMatrixMotionInstanceNV
  {
    using NativeType = VkAccelerationStructureMatrixMotionInstanceNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor.  Values assigned to the bit-field members are truncated
    // to their declared widths (24 or 8 bits).
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV( TransformMatrixKHR       transformT0_                            = {},
                                                                         TransformMatrixKHR       transformT1_                            = {},
                                                                         uint32_t                 instanceCustomIndex_                    = {},
                                                                         uint32_t                 mask_                                   = {},
                                                                         uint32_t                 instanceShaderBindingTableRecordOffset_ = {},
                                                                         GeometryInstanceFlagsKHR flags_                                  = {},
                                                                         uint64_t                 accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT
      : transformT0{ transformT0_ }
      , transformT1{ transformT1_ }
      , instanceCustomIndex{ instanceCustomIndex_ }
      , mask{ mask_ }
      , instanceShaderBindingTableRecordOffset{ instanceShaderBindingTableRecordOffset_ }
      , flags{ flags_ }
      , accelerationStructureReference{ accelerationStructureReference_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14
      AccelerationStructureMatrixMotionInstanceNV( AccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Constructs from the C type by reinterpreting it as the layout-compatible wrapper.
    AccelerationStructureMatrixMotionInstanceNV( VkAccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureMatrixMotionInstanceNV( *reinterpret_cast<AccelerationStructureMatrixMotionInstanceNV const *>( &rhs ) )
    {
    }

    AccelerationStructureMatrixMotionInstanceNV & operator=( AccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assigns from the C type (same reinterpret_cast aliasing as above).
    AccelerationStructureMatrixMotionInstanceNV & operator=( VkAccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AccelerationStructureMatrixMotionInstanceNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setTransformT0( TransformMatrixKHR const & transformT0_ ) VULKAN_HPP_NOEXCEPT
    {
      transformT0 = transformT0_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setTransformT1( TransformMatrixKHR const & transformT1_ ) VULKAN_HPP_NOEXCEPT
    {
      transformT1 = transformT1_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceCustomIndex = instanceCustomIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT
    {
      mask = mask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV &
      setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_;
      return *this;
    }

    // Not constexpr: converts the C++ flags wrapper to the raw Vk bitmask (the member is
    // a bit-field of the C type) via reinterpret_cast.
    AccelerationStructureMatrixMotionInstanceNV & setFlags( GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = *reinterpret_cast<VkGeometryInstanceFlagsKHR *>( &flags_ );
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV &
      setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureReference = accelerationStructureReference_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type (reference and pointer, const and non-const).
    operator VkAccelerationStructureMatrixMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureMatrixMotionInstanceNV *>( this );
    }

    operator VkAccelerationStructureMatrixMotionInstanceNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureMatrixMotionInstanceNV *>( this );
    }

    operator VkAccelerationStructureMatrixMotionInstanceNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAccelerationStructureMatrixMotionInstanceNV *>( this );
    }

    operator VkAccelerationStructureMatrixMotionInstanceNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAccelerationStructureMatrixMotionInstanceNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns const references to all members for generic tuple-based comparison.
    std::tuple<TransformMatrixKHR const &,
               TransformMatrixKHR const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               VkGeometryInstanceFlagsKHR const &,
               uint64_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( transformT0, transformT1, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference );
    }
#endif

    // Comparison: defaulted <=> when available, otherwise hand-written ==/!=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AccelerationStructureMatrixMotionInstanceNV const & ) const = default;
#else
    bool operator==( AccelerationStructureMatrixMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( transformT0 == rhs.transformT0 ) && ( transformT1 == rhs.transformT1 ) && ( instanceCustomIndex == rhs.instanceCustomIndex ) &&
             ( mask == rhs.mask ) && ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) && ( flags == rhs.flags ) &&
             ( accelerationStructureReference == rhs.accelerationStructureReference );
#  endif
    }

    bool operator!=( AccelerationStructureMatrixMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    TransformMatrixKHR         transformT0 = {};
    TransformMatrixKHR         transformT1 = {};
    // Bit-fields: no default member initializers (they are set by the constructor above,
    // when constructors are enabled).  flags is stored as the raw Vk type so it can be a
    // bit-field; use setFlags to assign from the C++ flags wrapper.
    uint32_t                   instanceCustomIndex                    : 24;
    uint32_t                   mask                                   : 8;
    uint32_t                   instanceShaderBindingTableRecordOffset : 24;
    VkGeometryInstanceFlagsKHR flags                                  : 8;
    uint64_t                   accelerationStructureReference = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to this C++ wrapper for template metaprogramming (C++20 builds only).
  template <>
  struct CppType<VkAccelerationStructureMatrixMotionInstanceNV>
  {
    using Type = AccelerationStructureMatrixMotionInstanceNV;
  };
#endif

  // wrapper struct for struct VkAccelerationStructureMemoryRequirementsInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureMemoryRequirementsInfoNV.html
  struct AccelerationStructureMemoryRequirementsInfoNV
  {
    using NativeType = VkAccelerationStructureMemoryRequirementsInfoNV;

    // Compile-time metadata identifying this structure type (used by structure-chain helpers).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV(
      AccelerationStructureMemoryRequirementsTypeNV type_                  = AccelerationStructureMemoryRequirementsTypeNV::eObject,
      AccelerationStructureNV                       accelerationStructure_ = {},
      const void *                                  pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , type{ type_ }
      , accelerationStructure{ accelerationStructure_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      AccelerationStructureMemoryRequirementsInfoNV( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Constructs from the C type by reinterpreting it as the layout-compatible wrapper.
    AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureMemoryRequirementsInfoNV( *reinterpret_cast<AccelerationStructureMemoryRequirementsInfoNV const *>( &rhs ) )
    {
    }

    AccelerationStructureMemoryRequirementsInfoNV & operator=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assigns from the C type (same reinterpret_cast aliasing as above).
    AccelerationStructureMemoryRequirementsInfoNV & operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AccelerationStructureMemoryRequirementsInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & setType( AccelerationStructureMemoryRequirementsTypeNV type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV &
      setAccelerationStructure( AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructure = accelerationStructure_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type (reference and pointer, const and non-const).
    operator VkAccelerationStructureMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( this );
    }

    operator VkAccelerationStructureMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureMemoryRequirementsInfoNV *>( this );
    }

    operator VkAccelerationStructureMemoryRequirementsInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( this );
    }

    operator VkAccelerationStructureMemoryRequirementsInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAccelerationStructureMemoryRequirementsInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns const references to all members for generic tuple-based comparison.
    std::tuple<StructureType const &, const void * const &, AccelerationStructureMemoryRequirementsTypeNV const &, AccelerationStructureNV const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, type, accelerationStructure );
    }
#endif

    // Comparison: defaulted <=> when available, otherwise hand-written ==/!=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AccelerationStructureMemoryRequirementsInfoNV const & ) const = default;
#else
    bool operator==( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( accelerationStructure == rhs.accelerationStructure );
#  endif
    }

    bool operator!=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                                 sType                 = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
    const void *                                  pNext                 = {};
    AccelerationStructureMemoryRequirementsTypeNV type                  = AccelerationStructureMemoryRequirementsTypeNV::eObject;
    AccelerationStructureNV                       accelerationStructure = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to this C++ wrapper for template metaprogramming (C++20 builds only).
  template <>
  struct CppType<VkAccelerationStructureMemoryRequirementsInfoNV>
  {
    using Type = AccelerationStructureMemoryRequirementsInfoNV;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureMemoryRequirementsInfoNV>
  {
    using Type = AccelerationStructureMemoryRequirementsInfoNV;
  };

  // wrapper struct for struct VkAccelerationStructureMotionInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureMotionInfoNV.html
  struct AccelerationStructureMotionInfoNV
  {
    using NativeType = VkAccelerationStructureMotionInfoNV;

    // Compile-time metadata identifying this structure type (used by structure-chain helpers).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAccelerationStructureMotionInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR AccelerationStructureMotionInfoNV( uint32_t                               maxInstances_ = {},
                                                            AccelerationStructureMotionInfoFlagsNV flags_        = {},
                                                            const void *                           pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxInstances{ maxInstances_ }
      , flags{ flags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AccelerationStructureMotionInfoNV( AccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Constructs from the C type by reinterpreting it as the layout-compatible wrapper.
    AccelerationStructureMotionInfoNV( VkAccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureMotionInfoNV( *reinterpret_cast<AccelerationStructureMotionInfoNV const *>( &rhs ) )
    {
    }

    AccelerationStructureMotionInfoNV & operator=( AccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assigns from the C type (same reinterpret_cast aliasing as above).
    AccelerationStructureMotionInfoNV & operator=( VkAccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AccelerationStructureMotionInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & setMaxInstances( uint32_t maxInstances_ ) VULKAN_HPP_NOEXCEPT
    {
      maxInstances = maxInstances_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & setFlags( AccelerationStructureMotionInfoFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type (reference and pointer, const and non-const).
    operator VkAccelerationStructureMotionInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureMotionInfoNV *>( this );
    }

    operator VkAccelerationStructureMotionInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureMotionInfoNV *>( this );
    }

    operator VkAccelerationStructureMotionInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAccelerationStructureMotionInfoNV *>( this );
    }

    operator VkAccelerationStructureMotionInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAccelerationStructureMotionInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns const references to all members for generic tuple-based comparison.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, AccelerationStructureMotionInfoFlagsNV const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxInstances, flags );
    }
#endif

    // Comparison: defaulted <=> when available, otherwise hand-written ==/!=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AccelerationStructureMotionInfoNV const & ) const = default;
#else
    bool operator==( AccelerationStructureMotionInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxInstances == rhs.maxInstances ) && ( flags == rhs.flags );
#  endif
    }

    bool operator!=( AccelerationStructureMotionInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                          sType        = StructureType::eAccelerationStructureMotionInfoNV;
    const void *                           pNext        = {};
    uint32_t                               maxInstances = {};
    AccelerationStructureMotionInfoFlagsNV flags        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to this C++ wrapper for template metaprogramming (C++20 builds only).
  template <>
  struct CppType<VkAccelerationStructureMotionInfoNV>
  {
    using Type = AccelerationStructureMotionInfoNV;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureMotionInfoNV>
  {
    using Type = AccelerationStructureMotionInfoNV;
  };

  // wrapper struct for struct VkSRTDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSRTDataNV.html
  struct SRTDataNV
  {
    using NativeType = VkSRTDataNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; every component defaults to 0.0f.
    VULKAN_HPP_CONSTEXPR SRTDataNV( float sx_  = {},
                                    float a_   = {},
                                    float b_   = {},
                                    float pvx_ = {},
                                    float sy_  = {},
                                    float c_   = {},
                                    float pvy_ = {},
                                    float sz_  = {},
                                    float pvz_ = {},
                                    float qx_  = {},
                                    float qy_  = {},
                                    float qz_  = {},
                                    float qw_  = {},
                                    float tx_  = {},
                                    float ty_  = {},
                                    float tz_  = {} ) VULKAN_HPP_NOEXCEPT
      : sx{ sx_ }
      , a{ a_ }
      , b{ b_ }
      , pvx{ pvx_ }
      , sy{ sy_ }
      , c{ c_ }
      , pvy{ pvy_ }
      , sz{ sz_ }
      , pvz{ pvz_ }
      , qx{ qx_ }
      , qy{ qy_ }
      , qz{ qz_ }
      , qw{ qw_ }
      , tx{ tx_ }
      , ty{ ty_ }
      , tz{ tz_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SRTDataNV( SRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Constructs from the C type by reinterpreting it as the layout-compatible wrapper.
    SRTDataNV( VkSRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : SRTDataNV( *reinterpret_cast<SRTDataNV const *>( &rhs ) ) {}

    SRTDataNV & operator=( SRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assigns from the C type; reinterprets rhs as the layout-compatible wrapper.
    SRTDataNV & operator=( VkSRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SRTDataNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setSx( float sx_ ) VULKAN_HPP_NOEXCEPT
    {
      sx = sx_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setA( float a_ ) VULKAN_HPP_NOEXCEPT
    {
      a = a_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setB( float b_ ) VULKAN_HPP_NOEXCEPT
    {
      b = b_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setPvx( float pvx_ ) VULKAN_HPP_NOEXCEPT
    {
      pvx = pvx_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setSy( float sy_ ) VULKAN_HPP_NOEXCEPT
    {
      sy = sy_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setC( float c_ ) VULKAN_HPP_NOEXCEPT
    {
      c = c_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setPvy( float pvy_ ) VULKAN_HPP_NOEXCEPT
    {
      pvy = pvy_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setSz( float sz_ ) VULKAN_HPP_NOEXCEPT
    {
      sz = sz_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setPvz( float pvz_ ) VULKAN_HPP_NOEXCEPT
    {
      pvz = pvz_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQx( float qx_ ) VULKAN_HPP_NOEXCEPT
    {
      qx = qx_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQy( float qy_ ) VULKAN_HPP_NOEXCEPT
    {
      qy = qy_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQz( float qz_ ) VULKAN_HPP_NOEXCEPT
    {
      qz = qz_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQw( float qw_ ) VULKAN_HPP_NOEXCEPT
    {
      qw = qw_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setTx( float tx_ ) VULKAN_HPP_NOEXCEPT
    {
      tx = tx_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setTy( float ty_ ) VULKAN_HPP_NOEXCEPT
    {
      ty = ty_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setTz( float tz_ ) VULKAN_HPP_NOEXCEPT
    {
      tz = tz_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkSRTDataNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSRTDataNV *>( this );
    }

    operator VkSRTDataNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSRTDataNV *>( this );
    }

    operator VkSRTDataNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSRTDataNV *>( this );
    }

    operator VkSRTDataNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSRTDataNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<float const &,
               float const &,
               float const &,
               float const &,
               float const &,
               float const &,
               float const &,
               float const &,
               float const &,
               float const &,
               float const &,
               float const &,
               float const &,
               float const &,
               float const &,
               float const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sx, a, b, pvx, sy, c, pvy, sz, pvz, qx, qy, qz, qw, tx, ty, tz );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SRTDataNV const & ) const = default;
#else
    bool operator==( SRTDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sx == rhs.sx ) && ( a == rhs.a ) && ( b == rhs.b ) && ( pvx == rhs.pvx ) && ( sy == rhs.sy ) && ( c == rhs.c ) && ( pvy == rhs.pvy ) &&
             ( sz == rhs.sz ) && ( pvz == rhs.pvz ) && ( qx == rhs.qx ) && ( qy == rhs.qy ) && ( qz == rhs.qz ) && ( qw == rhs.qw ) && ( tx == rhs.tx ) &&
             ( ty == rhs.ty ) && ( tz == rhs.tz );
#  endif
    }

    bool operator!=( SRTDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    float sx  = {};
    float a   = {};
    float b   = {};
    float pvx = {};
    float sy  = {};
    float c   = {};
    float pvy = {};
    float sz  = {};
    float pvz = {};
    float qx  = {};
    float qy  = {};
    float qz  = {};
    float qw  = {};
    float tx  = {};
    float ty  = {};
    float tz  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type for CppType-based lookups (C++20 and newer only).
  template <>
  struct CppType<VkSRTDataNV>
  {
    using Type = SRTDataNV;
  };
#endif

  // wrapper struct for struct VkAccelerationStructureSRTMotionInstanceNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureSRTMotionInstanceNV.html
  // Describes a motion instance interpolated between two SRT transforms (transformT0 -> transformT1).
  // Several members are bit-fields whose widths must match the packed layout of the C struct exactly.
  struct AccelerationStructureSRTMotionInstanceNV
  {
    using NativeType = VkAccelerationStructureSRTMotionInstanceNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all members default to zero / empty.
    VULKAN_HPP_CONSTEXPR AccelerationStructureSRTMotionInstanceNV( SRTDataNV                transformT0_                            = {},
                                                                   SRTDataNV                transformT1_                            = {},
                                                                   uint32_t                 instanceCustomIndex_                    = {},
                                                                   uint32_t                 mask_                                   = {},
                                                                   uint32_t                 instanceShaderBindingTableRecordOffset_ = {},
                                                                   GeometryInstanceFlagsKHR flags_                                  = {},
                                                                   uint64_t                 accelerationStructureReference_         = {} ) VULKAN_HPP_NOEXCEPT
      : transformT0{ transformT0_ }
      , transformT1{ transformT1_ }
      , instanceCustomIndex{ instanceCustomIndex_ }
      , mask{ mask_ }
      , instanceShaderBindingTableRecordOffset{ instanceShaderBindingTableRecordOffset_ }
      , flags{ flags_ }
      , accelerationStructureReference{ accelerationStructureReference_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AccelerationStructureSRTMotionInstanceNV( AccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct: the wrapper is layout-compatible, so the storage is reinterpreted in place.
    AccelerationStructureSRTMotionInstanceNV( VkAccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureSRTMotionInstanceNV( *reinterpret_cast<AccelerationStructureSRTMotionInstanceNV const *>( &rhs ) )
    {
    }

    AccelerationStructureSRTMotionInstanceNV & operator=( AccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpretation, as above).
    AccelerationStructureSRTMotionInstanceNV & operator=( VkAccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AccelerationStructureSRTMotionInstanceNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each stores one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setTransformT0( SRTDataNV const & transformT0_ ) VULKAN_HPP_NOEXCEPT
    {
      transformT0 = transformT0_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setTransformT1( SRTDataNV const & transformT1_ ) VULKAN_HPP_NOEXCEPT
    {
      transformT1 = transformT1_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceCustomIndex = instanceCustomIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT
    {
      mask = mask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV &
      setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_;
      return *this;
    }

    // Not constexpr: stores the raw flag bits into the 8-bit bit-field via reinterpret_cast.
    AccelerationStructureSRTMotionInstanceNV & setFlags( GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = *reinterpret_cast<VkGeometryInstanceFlagsKHR *>( &flags_ );
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV &
      setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureReference = accelerationStructureReference_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer (layout-compatible).
    operator VkAccelerationStructureSRTMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureSRTMotionInstanceNV *>( this );
    }

    operator VkAccelerationStructureSRTMotionInstanceNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureSRTMotionInstanceNV *>( this );
    }

    operator VkAccelerationStructureSRTMotionInstanceNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAccelerationStructureSRTMotionInstanceNV *>( this );
    }

    operator VkAccelerationStructureSRTMotionInstanceNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAccelerationStructureSRTMotionInstanceNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of const references, in declaration order.
    std::tuple<SRTDataNV const &, SRTDataNV const &, uint32_t const &, uint32_t const &, uint32_t const &, VkGeometryInstanceFlagsKHR const &, uint64_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( transformT0, transformT1, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AccelerationStructureSRTMotionInstanceNV const & ) const = default;
#else
    // Member-wise equality (SRTDataNV members compare float components with ==).
    bool operator==( AccelerationStructureSRTMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( transformT0 == rhs.transformT0 ) && ( transformT1 == rhs.transformT1 ) && ( instanceCustomIndex == rhs.instanceCustomIndex ) &&
             ( mask == rhs.mask ) && ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) && ( flags == rhs.flags ) &&
             ( accelerationStructureReference == rhs.accelerationStructureReference );
#  endif
    }

    bool operator!=( AccelerationStructureSRTMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    SRTDataNV                  transformT0 = {};
    SRTDataNV                  transformT1 = {};
    // The following bit-fields mirror the packed C layout; they carry no default member
    // initializers (not allowed on bit-fields before C++20) — the constructor initializes them.
    uint32_t                   instanceCustomIndex                    : 24;
    uint32_t                   mask                                   : 8;
    uint32_t                   instanceShaderBindingTableRecordOffset : 24;
    VkGeometryInstanceFlagsKHR flags                                  : 8;
    uint64_t                   accelerationStructureReference = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type for CppType-based lookups (C++20 and newer only).
  template <>
  struct CppType<VkAccelerationStructureSRTMotionInstanceNV>
  {
    using Type = AccelerationStructureSRTMotionInstanceNV;
  };
#endif

  // wrapper union for VkAccelerationStructureMotionInstanceDataNV: holds exactly one of a static
  // instance, a matrix-motion instance, or an SRT-motion instance.
  union AccelerationStructureMotionInstanceDataNV
  {
    using NativeType = VkAccelerationStructureMotionInstanceDataNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )

    // Each constructor activates the corresponding union member; default-constructed state is a
    // value-initialized static instance.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( AccelerationStructureInstanceKHR staticInstance_ = {} )
      : staticInstance( staticInstance_ )
    {
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( AccelerationStructureMatrixMotionInstanceNV matrixMotionInstance_ )
      : matrixMotionInstance( matrixMotionInstance_ )
    {
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( AccelerationStructureSRTMotionInstanceNV srtMotionInstance_ )
      : srtMotionInstance( srtMotionInstance_ )
    {
    }
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
    // Fluent setters: each overwrites the union with the given alternative and returns *this.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV &
      setStaticInstance( AccelerationStructureInstanceKHR const & staticInstance_ ) VULKAN_HPP_NOEXCEPT
    {
      staticInstance = staticInstance_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV &
      setMatrixMotionInstance( AccelerationStructureMatrixMotionInstanceNV const & matrixMotionInstance_ ) VULKAN_HPP_NOEXCEPT
    {
      matrixMotionInstance = matrixMotionInstance_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV &
      setSrtMotionInstance( AccelerationStructureSRTMotionInstanceNV const & srtMotionInstance_ ) VULKAN_HPP_NOEXCEPT
    {
      srtMotionInstance = srtMotionInstance_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the native C union (layout-compatible reinterpretation).
    operator VkAccelerationStructureMotionInstanceDataNV const &() const
    {
      return *reinterpret_cast<const VkAccelerationStructureMotionInstanceDataNV *>( this );
    }

    operator VkAccelerationStructureMotionInstanceDataNV &()
    {
      return *reinterpret_cast<VkAccelerationStructureMotionInstanceDataNV *>( this );
    }

#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
    // With unrestricted unions available, the C++ wrapper types are stored directly;
    // otherwise fall back to the plain C types (identical layout).
    AccelerationStructureInstanceKHR            staticInstance;
    AccelerationStructureMatrixMotionInstanceNV matrixMotionInstance;
    AccelerationStructureSRTMotionInstanceNV    srtMotionInstance;
#else
    VkAccelerationStructureInstanceKHR            staticInstance;
    VkAccelerationStructureMatrixMotionInstanceNV matrixMotionInstance;
    VkAccelerationStructureSRTMotionInstanceNV    srtMotionInstance;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C union to its C++ wrapper type for CppType-based lookups (C++20 and newer only).
  template <>
  struct CppType<VkAccelerationStructureMotionInstanceDataNV>
  {
    using Type = AccelerationStructureMotionInstanceDataNV;
  };
#endif

  // wrapper struct for struct VkAccelerationStructureMotionInstanceNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureMotionInstanceNV.html
  // Pairs a motion-instance type tag with the union holding the data for that type.
  struct AccelerationStructureMotionInstanceNV
  {
    using NativeType = VkAccelerationStructureMotionInstanceNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; defaults to a static instance with empty flags and data.
    VULKAN_HPP_CONSTEXPR_14
      AccelerationStructureMotionInstanceNV( AccelerationStructureMotionInstanceTypeNV  type_  = AccelerationStructureMotionInstanceTypeNV::eStatic,
                                             AccelerationStructureMotionInstanceFlagsNV flags_ = {},
                                             AccelerationStructureMotionInstanceDataNV  data_  = {} ) VULKAN_HPP_NOEXCEPT
      : type{ type_ }
      , flags{ flags_ }
      , data{ data_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV( AccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct: the wrapper is layout-compatible, so the storage is reinterpreted in place.
    AccelerationStructureMotionInstanceNV( VkAccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureMotionInstanceNV( *reinterpret_cast<AccelerationStructureMotionInstanceNV const *>( &rhs ) )
    {
    }

    AccelerationStructureMotionInstanceNV & operator=( AccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpretation, as above).
    AccelerationStructureMotionInstanceNV & operator=( VkAccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AccelerationStructureMotionInstanceNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each stores one member and returns *this so calls can be chained.
    // NOTE(review): setType does not switch the active member of data — callers must keep type
    // and data consistent themselves.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV & setType( AccelerationStructureMotionInstanceTypeNV type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV & setFlags( AccelerationStructureMotionInstanceFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV & setData( AccelerationStructureMotionInstanceDataNV const & data_ ) VULKAN_HPP_NOEXCEPT
    {
      data = data_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer (layout-compatible).
    operator VkAccelerationStructureMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureMotionInstanceNV *>( this );
    }

    operator VkAccelerationStructureMotionInstanceNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureMotionInstanceNV *>( this );
    }

    operator VkAccelerationStructureMotionInstanceNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAccelerationStructureMotionInstanceNV *>( this );
    }

    operator VkAccelerationStructureMotionInstanceNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAccelerationStructureMotionInstanceNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of const references, in declaration order.
    // NOTE(review): unlike most wrappers, no comparison operators are provided here —
    // presumably because the union member data is not comparable; confirm against the generator.
    std::tuple<AccelerationStructureMotionInstanceTypeNV const &,
               AccelerationStructureMotionInstanceFlagsNV const &,
               AccelerationStructureMotionInstanceDataNV const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( type, flags, data );
    }
#endif

  public:
    AccelerationStructureMotionInstanceTypeNV  type  = AccelerationStructureMotionInstanceTypeNV::eStatic;
    AccelerationStructureMotionInstanceFlagsNV flags = {};
    AccelerationStructureMotionInstanceDataNV  data  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type for CppType-based lookups (C++20 and newer only).
  template <>
  struct CppType<VkAccelerationStructureMotionInstanceNV>
  {
    using Type = AccelerationStructureMotionInstanceNV;
  };
#endif

  // wrapper struct for struct VkMicromapUsageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapUsageEXT.html
  // Plain trio of uint32_t values (count, subdivisionLevel, format), layout-compatible with the C struct.
  struct MicromapUsageEXT
  {
    using NativeType = VkMicromapUsageEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; every field defaults to zero.
    VULKAN_HPP_CONSTEXPR MicromapUsageEXT( uint32_t count_ = {}, uint32_t subdivisionLevel_ = {}, uint32_t format_ = {} ) VULKAN_HPP_NOEXCEPT
      : count( count_ )
      , subdivisionLevel( subdivisionLevel_ )
      , format( format_ )
    {
    }

    VULKAN_HPP_CONSTEXPR MicromapUsageEXT( MicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible C struct by reinterpreting its storage.
    MicromapUsageEXT( VkMicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : MicromapUsageEXT( *reinterpret_cast<MicromapUsageEXT const *>( &rhs ) )
    {
    }

    MicromapUsageEXT & operator=( MicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible C struct.
    MicromapUsageEXT & operator=( VkMicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MicromapUsageEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each stores one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setCount( uint32_t count_ ) VULKAN_HPP_NOEXCEPT
    {
      this->count = count_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setSubdivisionLevel( uint32_t subdivisionLevel_ ) VULKAN_HPP_NOEXCEPT
    {
      this->subdivisionLevel = subdivisionLevel_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setFormat( uint32_t format_ ) VULKAN_HPP_NOEXCEPT
    {
      this->format = format_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the native C type: const reference / const pointer, then mutable variants.
    operator VkMicromapUsageEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMicromapUsageEXT *>( this );
    }

    operator VkMicromapUsageEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMicromapUsageEXT *>( this );
    }

    operator VkMicromapUsageEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMicromapUsageEXT *>( this );
    }

    operator VkMicromapUsageEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMicromapUsageEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of const references, in declaration order.
    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return { count, subdivisionLevel, format };
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MicromapUsageEXT const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( MicromapUsageEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return count == rhs.count && subdivisionLevel == rhs.subdivisionLevel && format == rhs.format;
#  endif
    }

    bool operator!=( MicromapUsageEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    uint32_t count            = {};
    uint32_t subdivisionLevel = {};
    uint32_t format           = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type for CppType-based lookups (C++20 and newer only).
  template <>
  struct CppType<VkMicromapUsageEXT>
  {
    using Type = MicromapUsageEXT;
  };
#endif

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  // wrapper struct for struct VkAccelerationStructureTrianglesDisplacementMicromapNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureTrianglesDisplacementMicromapNV.html
  struct AccelerationStructureTrianglesDisplacementMicromapNV
  {
    using NativeType = VkAccelerationStructureTrianglesDisplacementMicromapNV;

    // allowDuplicate = false: presumably this struct must not appear more than once in a
    // pNext chain — the flag is consumed elsewhere in vulkan.hpp (confirm against generator).
    // structureType is the sType value this wrapper always carries.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAccelerationStructureTrianglesDisplacementMicromapNV;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; every field defaults to a zero / eUndefined value, so a
    // default-constructed instance is a fully zero-initialized struct with the correct sType.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV( Format displacementBiasAndScaleFormat_ = Format::eUndefined,
                                                                                  Format displacementVectorFormat_       = Format::eUndefined,
                                                                                  DeviceOrHostAddressConstKHR      displacementBiasAndScaleBuffer_        = {},
                                                                                  DeviceSize                       displacementBiasAndScaleStride_        = {},
                                                                                  DeviceOrHostAddressConstKHR      displacementVectorBuffer_              = {},
                                                                                  DeviceSize                       displacementVectorStride_              = {},
                                                                                  DeviceOrHostAddressConstKHR      displacedMicromapPrimitiveFlags_       = {},
                                                                                  DeviceSize                       displacedMicromapPrimitiveFlagsStride_ = {},
                                                                                  IndexType                        indexType_        = IndexType::eUint16,
                                                                                  DeviceOrHostAddressConstKHR      indexBuffer_      = {},
                                                                                  DeviceSize                       indexStride_      = {},
                                                                                  uint32_t                         baseTriangle_     = {},
                                                                                  uint32_t                         usageCountsCount_ = {},
                                                                                  const MicromapUsageEXT *         pUsageCounts_     = {},
                                                                                  const MicromapUsageEXT * const * ppUsageCounts_    = {},
                                                                                  MicromapEXT                      micromap_         = {},
                                                                                  void *                           pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , displacementBiasAndScaleFormat{ displacementBiasAndScaleFormat_ }
      , displacementVectorFormat{ displacementVectorFormat_ }
      , displacementBiasAndScaleBuffer{ displacementBiasAndScaleBuffer_ }
      , displacementBiasAndScaleStride{ displacementBiasAndScaleStride_ }
      , displacementVectorBuffer{ displacementVectorBuffer_ }
      , displacementVectorStride{ displacementVectorStride_ }
      , displacedMicromapPrimitiveFlags{ displacedMicromapPrimitiveFlags_ }
      , displacedMicromapPrimitiveFlagsStride{ displacedMicromapPrimitiveFlagsStride_ }
      , indexType{ indexType_ }
      , indexBuffer{ indexBuffer_ }
      , indexStride{ indexStride_ }
      , baseTriangle{ baseTriangle_ }
      , usageCountsCount{ usageCountsCount_ }
      , pUsageCounts{ pUsageCounts_ }
      , ppUsageCounts{ ppUsageCounts_ }
      , micromap{ micromap_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14
      AccelerationStructureTrianglesDisplacementMicromapNV( AccelerationStructureTrianglesDisplacementMicromapNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the C struct: reinterprets rhs as the wrapper and copies it.
    // Valid only because the wrapper's member layout matches the C struct exactly.
    AccelerationStructureTrianglesDisplacementMicromapNV( VkAccelerationStructureTrianglesDisplacementMicromapNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureTrianglesDisplacementMicromapNV( *reinterpret_cast<AccelerationStructureTrianglesDisplacementMicromapNV const *>( &rhs ) )
    {
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor taking ArrayProxys instead of count+pointer pairs.
    // usageCountsCount is taken from usageCounts_; when both usageCounts_ and pUsageCounts_
    // are non-empty their sizes must agree — checked via assert (VULKAN_HPP_NO_EXCEPTIONS)
    // or a thrown LogicError otherwise.
    AccelerationStructureTrianglesDisplacementMicromapNV( Format                                                  displacementBiasAndScaleFormat_,
                                                          Format                                                  displacementVectorFormat_,
                                                          DeviceOrHostAddressConstKHR                             displacementBiasAndScaleBuffer_,
                                                          DeviceSize                                              displacementBiasAndScaleStride_,
                                                          DeviceOrHostAddressConstKHR                             displacementVectorBuffer_,
                                                          DeviceSize                                              displacementVectorStride_,
                                                          DeviceOrHostAddressConstKHR                             displacedMicromapPrimitiveFlags_,
                                                          DeviceSize                                              displacedMicromapPrimitiveFlagsStride_,
                                                          IndexType                                               indexType_,
                                                          DeviceOrHostAddressConstKHR                             indexBuffer_,
                                                          DeviceSize                                              indexStride_,
                                                          uint32_t                                                baseTriangle_,
                                                          ArrayProxyNoTemporaries<const MicromapUsageEXT> const & usageCounts_,
                                                          ArrayProxyNoTemporaries<const MicromapUsageEXT * const> const & pUsageCounts_ = {},
                                                          MicromapEXT                                                     micromap_     = {},
                                                          void *                                                          pNext_        = nullptr )
      : pNext( pNext_ )
      , displacementBiasAndScaleFormat( displacementBiasAndScaleFormat_ )
      , displacementVectorFormat( displacementVectorFormat_ )
      , displacementBiasAndScaleBuffer( displacementBiasAndScaleBuffer_ )
      , displacementBiasAndScaleStride( displacementBiasAndScaleStride_ )
      , displacementVectorBuffer( displacementVectorBuffer_ )
      , displacementVectorStride( displacementVectorStride_ )
      , displacedMicromapPrimitiveFlags( displacedMicromapPrimitiveFlags_ )
      , displacedMicromapPrimitiveFlagsStride( displacedMicromapPrimitiveFlagsStride_ )
      , indexType( indexType_ )
      , indexBuffer( indexBuffer_ )
      , indexStride( indexStride_ )
      , baseTriangle( baseTriangle_ )
      , usageCountsCount( static_cast<uint32_t>( usageCounts_.size() ) )
      , pUsageCounts( usageCounts_.data() )
      , ppUsageCounts( pUsageCounts_.data() )
      , micromap( micromap_ )
    {
#      ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( usageCounts_.empty() || pUsageCounts_.empty() || ( usageCounts_.size() == pUsageCounts_.size() ) );
#      else
      if ( !usageCounts_.empty() && !pUsageCounts_.empty() && ( usageCounts_.size() != pUsageCounts_.size() ) )
      {
        throw LogicError(
          VULKAN_HPP_NAMESPACE_STRING
          "::AccelerationStructureTrianglesDisplacementMicromapNV::AccelerationStructureTrianglesDisplacementMicromapNV: !usageCounts_.empty() && !pUsageCounts_.empty() && ( usageCounts_.size() != pUsageCounts_.size() )" );
      }
#      endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    AccelerationStructureTrianglesDisplacementMicromapNV &
      operator=( AccelerationStructureTrianglesDisplacementMicromapNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct; same layout-compatibility requirement as the converting
    // constructor above.
    AccelerationStructureTrianglesDisplacementMicromapNV & operator=( VkAccelerationStructureTrianglesDisplacementMicromapNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AccelerationStructureTrianglesDisplacementMicromapNV const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this. The ArrayProxy overloads
    // further below also update usageCountsCount from the proxy's size.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV &
      setDisplacementBiasAndScaleFormat( Format displacementBiasAndScaleFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      displacementBiasAndScaleFormat = displacementBiasAndScaleFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV &
      setDisplacementVectorFormat( Format displacementVectorFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      displacementVectorFormat = displacementVectorFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV &
      setDisplacementBiasAndScaleBuffer( DeviceOrHostAddressConstKHR const & displacementBiasAndScaleBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      displacementBiasAndScaleBuffer = displacementBiasAndScaleBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV &
      setDisplacementBiasAndScaleStride( DeviceSize displacementBiasAndScaleStride_ ) VULKAN_HPP_NOEXCEPT
    {
      displacementBiasAndScaleStride = displacementBiasAndScaleStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV &
      setDisplacementVectorBuffer( DeviceOrHostAddressConstKHR const & displacementVectorBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      displacementVectorBuffer = displacementVectorBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV &
      setDisplacementVectorStride( DeviceSize displacementVectorStride_ ) VULKAN_HPP_NOEXCEPT
    {
      displacementVectorStride = displacementVectorStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV &
      setDisplacedMicromapPrimitiveFlags( DeviceOrHostAddressConstKHR const & displacedMicromapPrimitiveFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      displacedMicromapPrimitiveFlags = displacedMicromapPrimitiveFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV &
      setDisplacedMicromapPrimitiveFlagsStride( DeviceSize displacedMicromapPrimitiveFlagsStride_ ) VULKAN_HPP_NOEXCEPT
    {
      displacedMicromapPrimitiveFlagsStride = displacedMicromapPrimitiveFlagsStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setIndexType( IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
    {
      indexType = indexType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV &
      setIndexBuffer( DeviceOrHostAddressConstKHR const & indexBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      indexBuffer = indexBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setIndexStride( DeviceSize indexStride_ ) VULKAN_HPP_NOEXCEPT
    {
      indexStride = indexStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setBaseTriangle( uint32_t baseTriangle_ ) VULKAN_HPP_NOEXCEPT
    {
      baseTriangle = baseTriangle_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setUsageCountsCount( uint32_t usageCountsCount_ ) VULKAN_HPP_NOEXCEPT
    {
      usageCountsCount = usageCountsCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setPUsageCounts( const MicromapUsageEXT * pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
    {
      pUsageCounts = pUsageCounts_;
      return *this;
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets pUsageCounts AND usageCountsCount from one ArrayProxy.
    AccelerationStructureTrianglesDisplacementMicromapNV &
      setUsageCounts( ArrayProxyNoTemporaries<const MicromapUsageEXT> const & usageCounts_ ) VULKAN_HPP_NOEXCEPT
    {
      usageCountsCount = static_cast<uint32_t>( usageCounts_.size() );
      pUsageCounts     = usageCounts_.data();
      return *this;
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV &
      setPpUsageCounts( const MicromapUsageEXT * const * ppUsageCounts_ ) VULKAN_HPP_NOEXCEPT
    {
      ppUsageCounts = ppUsageCounts_;
      return *this;
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets ppUsageCounts AND usageCountsCount from one ArrayProxy (note: overloads
    // setPUsageCounts by parameter type but writes ppUsageCounts, not pUsageCounts).
    AccelerationStructureTrianglesDisplacementMicromapNV &
      setPUsageCounts( ArrayProxyNoTemporaries<const MicromapUsageEXT * const> const & pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
    {
      usageCountsCount = static_cast<uint32_t>( pUsageCounts_.size() );
      ppUsageCounts    = pUsageCounts_.data();
      return *this;
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setMicromap( MicromapEXT micromap_ ) VULKAN_HPP_NOEXCEPT
    {
      micromap = micromap_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const);
    // they rely on the member layout below matching the C struct exactly.
    operator VkAccelerationStructureTrianglesDisplacementMicromapNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureTrianglesDisplacementMicromapNV *>( this );
    }

    operator VkAccelerationStructureTrianglesDisplacementMicromapNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureTrianglesDisplacementMicromapNV *>( this );
    }

    operator VkAccelerationStructureTrianglesDisplacementMicromapNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAccelerationStructureTrianglesDisplacementMicromapNV *>( this );
    }

    operator VkAccelerationStructureTrianglesDisplacementMicromapNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAccelerationStructureTrianglesDisplacementMicromapNV *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Const references to every member in declaration order.
    std::tuple<StructureType const &,
               void * const &,
               Format const &,
               Format const &,
               DeviceOrHostAddressConstKHR const &,
               DeviceSize const &,
               DeviceOrHostAddressConstKHR const &,
               DeviceSize const &,
               DeviceOrHostAddressConstKHR const &,
               DeviceSize const &,
               IndexType const &,
               DeviceOrHostAddressConstKHR const &,
               DeviceSize const &,
               uint32_t const &,
               uint32_t const &,
               const MicromapUsageEXT * const &,
               const MicromapUsageEXT * const * const &,
               MicromapEXT const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       displacementBiasAndScaleFormat,
                       displacementVectorFormat,
                       displacementBiasAndScaleBuffer,
                       displacementBiasAndScaleStride,
                       displacementVectorBuffer,
                       displacementVectorStride,
                       displacedMicromapPrimitiveFlags,
                       displacedMicromapPrimitiveFlagsStride,
                       indexType,
                       indexBuffer,
                       indexStride,
                       baseTriangle,
                       usageCountsCount,
                       pUsageCounts,
                       ppUsageCounts,
                       micromap );
    }
#  endif

    // NOTE(review): no operator== / operator<=> is generated for this struct — presumably
    // because the DeviceOrHostAddressConstKHR members are unions; confirm against the generator.
  public:
    // Field layout mirrors VkAccelerationStructureTrianglesDisplacementMicromapNV exactly;
    // the reinterpret_cast conversions above depend on order and types staying unchanged.
    StructureType                    sType                                 = StructureType::eAccelerationStructureTrianglesDisplacementMicromapNV;
    void *                           pNext                                 = {};
    Format                           displacementBiasAndScaleFormat        = Format::eUndefined;
    Format                           displacementVectorFormat              = Format::eUndefined;
    DeviceOrHostAddressConstKHR      displacementBiasAndScaleBuffer        = {};
    DeviceSize                       displacementBiasAndScaleStride        = {};
    DeviceOrHostAddressConstKHR      displacementVectorBuffer              = {};
    DeviceSize                       displacementVectorStride              = {};
    DeviceOrHostAddressConstKHR      displacedMicromapPrimitiveFlags       = {};
    DeviceSize                       displacedMicromapPrimitiveFlagsStride = {};
    IndexType                        indexType                             = IndexType::eUint16;
    DeviceOrHostAddressConstKHR      indexBuffer                           = {};
    DeviceSize                       indexStride                           = {};
    uint32_t                         baseTriangle                          = {};
    uint32_t                         usageCountsCount                      = {};
    const MicromapUsageEXT *         pUsageCounts                          = {};
    const MicromapUsageEXT * const * ppUsageCounts                         = {};
    MicromapEXT                      micromap                              = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native type to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkAccelerationStructureTrianglesDisplacementMicromapNV>
  {
    using Type = AccelerationStructureTrianglesDisplacementMicromapNV;
  };
#  endif

  // Maps the sType enumerant back to the wrapper struct carrying it.
  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureTrianglesDisplacementMicromapNV>
  {
    using Type = AccelerationStructureTrianglesDisplacementMicromapNV;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  // wrapper struct for struct VkAccelerationStructureTrianglesOpacityMicromapEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureTrianglesOpacityMicromapEXT.html
  struct AccelerationStructureTrianglesOpacityMicromapEXT
  {
    using NativeType = VkAccelerationStructureTrianglesOpacityMicromapEXT;

    // allowDuplicate = false: presumably this struct must not appear more than once in a
    // pNext chain — the flag is consumed elsewhere in vulkan.hpp (confirm against generator).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all fields default to zero / eUint16 as appropriate.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT( IndexType                        indexType_        = IndexType::eUint16,
                                                                              DeviceOrHostAddressConstKHR      indexBuffer_      = {},
                                                                              DeviceSize                       indexStride_      = {},
                                                                              uint32_t                         baseTriangle_     = {},
                                                                              uint32_t                         usageCountsCount_ = {},
                                                                              const MicromapUsageEXT *         pUsageCounts_     = {},
                                                                              const MicromapUsageEXT * const * ppUsageCounts_    = {},
                                                                              MicromapEXT                      micromap_         = {},
                                                                              void *                           pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , indexType{ indexType_ }
      , indexBuffer{ indexBuffer_ }
      , indexStride{ indexStride_ }
      , baseTriangle{ baseTriangle_ }
      , usageCountsCount{ usageCountsCount_ }
      , pUsageCounts{ pUsageCounts_ }
      , ppUsageCounts{ ppUsageCounts_ }
      , micromap{ micromap_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14
      AccelerationStructureTrianglesOpacityMicromapEXT( AccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the C struct; relies on identical member layout.
    AccelerationStructureTrianglesOpacityMicromapEXT( VkAccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureTrianglesOpacityMicromapEXT( *reinterpret_cast<AccelerationStructureTrianglesOpacityMicromapEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor. Unlike the NV displacement variant above, at most ONE of
    // usageCounts_ / pUsageCounts_ may be non-empty (asserted, or LogicError when exceptions
    // are enabled); usageCountsCount is taken from whichever proxy is non-empty.
    AccelerationStructureTrianglesOpacityMicromapEXT( IndexType                                                       indexType_,
                                                      DeviceOrHostAddressConstKHR                                     indexBuffer_,
                                                      DeviceSize                                                      indexStride_,
                                                      uint32_t                                                        baseTriangle_,
                                                      ArrayProxyNoTemporaries<const MicromapUsageEXT> const &         usageCounts_,
                                                      ArrayProxyNoTemporaries<const MicromapUsageEXT * const> const & pUsageCounts_ = {},
                                                      MicromapEXT                                                     micromap_     = {},
                                                      void *                                                          pNext_        = nullptr )
      : pNext( pNext_ )
      , indexType( indexType_ )
      , indexBuffer( indexBuffer_ )
      , indexStride( indexStride_ )
      , baseTriangle( baseTriangle_ )
      , usageCountsCount( static_cast<uint32_t>( !usageCounts_.empty() ? usageCounts_.size() : pUsageCounts_.size() ) )
      , pUsageCounts( usageCounts_.data() )
      , ppUsageCounts( pUsageCounts_.data() )
      , micromap( micromap_ )
    {
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( ( !usageCounts_.empty() + !pUsageCounts_.empty() ) <= 1 );
#    else
      if ( 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() ) )
      {
        throw LogicError(
          VULKAN_HPP_NAMESPACE_STRING
          "::AccelerationStructureTrianglesOpacityMicromapEXT::AccelerationStructureTrianglesOpacityMicromapEXT: 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() )" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    AccelerationStructureTrianglesOpacityMicromapEXT & operator=( AccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct; relies on identical member layout.
    AccelerationStructureTrianglesOpacityMicromapEXT & operator=( VkAccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AccelerationStructureTrianglesOpacityMicromapEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this. The ArrayProxy overloads
    // below also update usageCountsCount from the proxy's size.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setIndexType( IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
    {
      indexType = indexType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT &
      setIndexBuffer( DeviceOrHostAddressConstKHR const & indexBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      indexBuffer = indexBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setIndexStride( DeviceSize indexStride_ ) VULKAN_HPP_NOEXCEPT
    {
      indexStride = indexStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setBaseTriangle( uint32_t baseTriangle_ ) VULKAN_HPP_NOEXCEPT
    {
      baseTriangle = baseTriangle_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setUsageCountsCount( uint32_t usageCountsCount_ ) VULKAN_HPP_NOEXCEPT
    {
      usageCountsCount = usageCountsCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setPUsageCounts( const MicromapUsageEXT * pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
    {
      pUsageCounts = pUsageCounts_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets pUsageCounts AND usageCountsCount from one ArrayProxy.
    AccelerationStructureTrianglesOpacityMicromapEXT &
      setUsageCounts( ArrayProxyNoTemporaries<const MicromapUsageEXT> const & usageCounts_ ) VULKAN_HPP_NOEXCEPT
    {
      usageCountsCount = static_cast<uint32_t>( usageCounts_.size() );
      pUsageCounts     = usageCounts_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT &
      setPpUsageCounts( const MicromapUsageEXT * const * ppUsageCounts_ ) VULKAN_HPP_NOEXCEPT
    {
      ppUsageCounts = ppUsageCounts_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets ppUsageCounts AND usageCountsCount from one ArrayProxy (note: overloads
    // setPUsageCounts by parameter type but writes ppUsageCounts, not pUsageCounts).
    AccelerationStructureTrianglesOpacityMicromapEXT &
      setPUsageCounts( ArrayProxyNoTemporaries<const MicromapUsageEXT * const> const & pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
    {
      usageCountsCount = static_cast<uint32_t>( pUsageCounts_.size() );
      ppUsageCounts    = pUsageCounts_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setMicromap( MicromapEXT micromap_ ) VULKAN_HPP_NOEXCEPT
    {
      micromap = micromap_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const);
    // they rely on the member layout below matching the C struct exactly.
    operator VkAccelerationStructureTrianglesOpacityMicromapEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureTrianglesOpacityMicromapEXT *>( this );
    }

    operator VkAccelerationStructureTrianglesOpacityMicromapEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureTrianglesOpacityMicromapEXT *>( this );
    }

    operator VkAccelerationStructureTrianglesOpacityMicromapEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAccelerationStructureTrianglesOpacityMicromapEXT *>( this );
    }

    operator VkAccelerationStructureTrianglesOpacityMicromapEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAccelerationStructureTrianglesOpacityMicromapEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Const references to every member in declaration order.
    std::tuple<StructureType const &,
               void * const &,
               IndexType const &,
               DeviceOrHostAddressConstKHR const &,
               DeviceSize const &,
               uint32_t const &,
               uint32_t const &,
               const MicromapUsageEXT * const &,
               const MicromapUsageEXT * const * const &,
               MicromapEXT const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, indexType, indexBuffer, indexStride, baseTriangle, usageCountsCount, pUsageCounts, ppUsageCounts, micromap );
    }
#endif

    // NOTE(review): no operator== / operator<=> is generated for this struct — presumably
    // because indexBuffer is a DeviceOrHostAddressConstKHR union; confirm against the generator.
  public:
    // Field layout mirrors VkAccelerationStructureTrianglesOpacityMicromapEXT exactly;
    // the reinterpret_cast conversions above depend on order and types staying unchanged.
    StructureType                    sType            = StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT;
    void *                           pNext            = {};
    IndexType                        indexType        = IndexType::eUint16;
    DeviceOrHostAddressConstKHR      indexBuffer      = {};
    DeviceSize                       indexStride      = {};
    uint32_t                         baseTriangle     = {};
    uint32_t                         usageCountsCount = {};
    const MicromapUsageEXT *         pUsageCounts     = {};
    const MicromapUsageEXT * const * ppUsageCounts    = {};
    MicromapEXT                      micromap         = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native type to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkAccelerationStructureTrianglesOpacityMicromapEXT>
  {
    using Type = AccelerationStructureTrianglesOpacityMicromapEXT;
  };
#endif

  // Maps the sType enumerant back to the wrapper struct carrying it.
  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT>
  {
    using Type = AccelerationStructureTrianglesOpacityMicromapEXT;
  };

  // wrapper struct for struct VkAccelerationStructureVersionInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureVersionInfoKHR.html
  struct AccelerationStructureVersionInfoKHR
  {
    using NativeType = VkAccelerationStructureVersionInfoKHR;

    // allowDuplicate = false: presumably this struct must not appear more than once in a
    // pNext chain — the flag is consumed elsewhere in vulkan.hpp (confirm against generator).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAccelerationStructureVersionInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor. pVersionData is stored as-is (no copy is made); the caller
    // must keep the pointed-to bytes alive. See the registry entry for the required size
    // of that buffer — it is not expressed in this type.
    VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( const uint8_t * pVersionData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pVersionData{ pVersionData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( AccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the C struct; relies on identical member layout.
    AccelerationStructureVersionInfoKHR( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureVersionInfoKHR( *reinterpret_cast<AccelerationStructureVersionInfoKHR const *>( &rhs ) )
    {
    }

    AccelerationStructureVersionInfoKHR & operator=( AccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct; relies on identical member layout.
    AccelerationStructureVersionInfoKHR & operator=( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AccelerationStructureVersionInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR & setPVersionData( const uint8_t * pVersionData_ ) VULKAN_HPP_NOEXCEPT
    {
      pVersionData = pVersionData_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const);
    // they rely on the member layout below matching the C struct exactly.
    operator VkAccelerationStructureVersionInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( this );
    }

    operator VkAccelerationStructureVersionInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureVersionInfoKHR *>( this );
    }

    operator VkAccelerationStructureVersionInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( this );
    }

    operator VkAccelerationStructureVersionInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAccelerationStructureVersionInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Const references to every member in declaration order.
    std::tuple<StructureType const &, const void * const &, const uint8_t * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pVersionData );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AccelerationStructureVersionInfoKHR const & ) const = default;
#else
    // Member-wise equality. Note: pVersionData is compared as a POINTER value only — the
    // bytes it points to are not inspected.
    bool operator==( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pVersionData == rhs.pVersionData );
#  endif
    }

    bool operator!=( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Field layout mirrors VkAccelerationStructureVersionInfoKHR exactly; the
    // reinterpret_cast conversions above depend on order and types staying unchanged.
    StructureType   sType        = StructureType::eAccelerationStructureVersionInfoKHR;
    const void *    pNext        = {};
    const uint8_t * pVersionData = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper, for generic type lookups.
  template <>
  struct CppType<VkAccelerationStructureVersionInfoKHR>
  {
    using Type = AccelerationStructureVersionInfoKHR;
  };
#endif

  // Maps the StructureType enum value to the corresponding C++ struct type.
  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureVersionInfoKHR>
  {
    using Type = AccelerationStructureVersionInfoKHR;
  };

  // wrapper struct for struct VkAcquireNextImageInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAcquireNextImageInfoKHR.html
  struct AcquireNextImageInfoKHR
  {
    // Alias of the layout-compatible C struct; enables the reinterpret_casts below.
    using NativeType = VkAcquireNextImageInfoKHR;

    // This struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAcquireNextImageInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( SwapchainKHR swapchain_  = {},
                                                  uint64_t     timeout_    = {},
                                                  Semaphore    semaphore_  = {},
                                                  Fence        fence_      = {},
                                                  uint32_t     deviceMask_ = {},
                                                  const void * pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , swapchain{ swapchain_ }
      , timeout{ timeout_ }
      , semaphore{ semaphore_ }
      , fence{ fence_ }
      , deviceMask{ deviceMask_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the wrapper (identical layout).
    AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AcquireNextImageInfoKHR( *reinterpret_cast<AcquireNextImageInfoKHR const *>( &rhs ) )
    {
    }

    AcquireNextImageInfoKHR & operator=( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the layout-compatible reinterpret_cast.
    AcquireNextImageInfoKHR & operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AcquireNextImageInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (builder-style) setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setSwapchain( SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchain = swapchain_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT
    {
      timeout = timeout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setSemaphore( Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphore = semaphore_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setFence( Fence fence_ ) VULKAN_HPP_NOEXCEPT
    {
      fence = fence_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceMask = deviceMask_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer, for passing to the C API.
    operator VkAcquireNextImageInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAcquireNextImageInfoKHR *>( this );
    }

    operator VkAcquireNextImageInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAcquireNextImageInfoKHR *>( this );
    }

    operator VkAcquireNextImageInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAcquireNextImageInfoKHR *>( this );
    }

    operator VkAcquireNextImageInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAcquireNextImageInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, SwapchainKHR const &, uint64_t const &, Semaphore const &, Fence const &, uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, swapchain, timeout, semaphore, fence, deviceMask );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AcquireNextImageInfoKHR const & ) const = default;
#else
    // Memberwise equality; handles compare by underlying handle value.
    bool operator==( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && ( timeout == rhs.timeout ) &&
             ( semaphore == rhs.semaphore ) && ( fence == rhs.fence ) && ( deviceMask == rhs.deviceMask );
#  endif
    }

    bool operator!=( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType      = StructureType::eAcquireNextImageInfoKHR;  // must keep this value for the C API
    const void *  pNext      = {};
    SwapchainKHR  swapchain  = {};
    uint64_t      timeout    = {};
    Semaphore     semaphore  = {};
    Fence         fence      = {};
    uint32_t      deviceMask = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper, for generic type lookups.
  template <>
  struct CppType<VkAcquireNextImageInfoKHR>
  {
    using Type = AcquireNextImageInfoKHR;
  };
#endif

  // Maps the StructureType enum value to the corresponding C++ struct type.
  template <>
  struct CppType<StructureType, StructureType::eAcquireNextImageInfoKHR>
  {
    using Type = AcquireNextImageInfoKHR;
  };

  // wrapper struct for struct VkAcquireProfilingLockInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAcquireProfilingLockInfoKHR.html
  struct AcquireProfilingLockInfoKHR
  {
    // Alias of the layout-compatible C struct; enables the reinterpret_casts below.
    using NativeType = VkAcquireProfilingLockInfoKHR;

    // This struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAcquireProfilingLockInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      AcquireProfilingLockInfoKHR( AcquireProfilingLockFlagsKHR flags_ = {}, uint64_t timeout_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , timeout{ timeout_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the wrapper (identical layout).
    AcquireProfilingLockInfoKHR( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AcquireProfilingLockInfoKHR( *reinterpret_cast<AcquireProfilingLockInfoKHR const *>( &rhs ) )
    {
    }

    AcquireProfilingLockInfoKHR & operator=( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the layout-compatible reinterpret_cast.
    AcquireProfilingLockInfoKHR & operator=( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AcquireProfilingLockInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (builder-style) setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setFlags( AcquireProfilingLockFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT
    {
      timeout = timeout_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer, for passing to the C API.
    operator VkAcquireProfilingLockInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( this );
    }

    operator VkAcquireProfilingLockInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAcquireProfilingLockInfoKHR *>( this );
    }

    operator VkAcquireProfilingLockInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( this );
    }

    operator VkAcquireProfilingLockInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAcquireProfilingLockInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, AcquireProfilingLockFlagsKHR const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, timeout );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AcquireProfilingLockInfoKHR const & ) const = default;
#else
    bool operator==( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( timeout == rhs.timeout );
#  endif
    }

    bool operator!=( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                sType   = StructureType::eAcquireProfilingLockInfoKHR;  // must keep this value for the C API
    const void *                 pNext   = {};
    AcquireProfilingLockFlagsKHR flags   = {};
    uint64_t                     timeout = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper, for generic type lookups.
  template <>
  struct CppType<VkAcquireProfilingLockInfoKHR>
  {
    using Type = AcquireProfilingLockInfoKHR;
  };
#endif

  // Maps the StructureType enum value to the corresponding C++ struct type.
  template <>
  struct CppType<StructureType, StructureType::eAcquireProfilingLockInfoKHR>
  {
    using Type = AcquireProfilingLockInfoKHR;
  };

  typedef void *( VKAPI_PTR * PFN_AllocationFunction )( void * pUserData, size_t size, size_t alignment, SystemAllocationScope allocationScope );

  typedef void *( VKAPI_PTR * PFN_ReallocationFunction )(
    void * pUserData, void * pOriginal, size_t size, size_t alignment, SystemAllocationScope allocationScope );

  typedef void( VKAPI_PTR * PFN_FreeFunction )( void * pUserData, void * pMemory );

  typedef void( VKAPI_PTR * PFN_InternalAllocationNotification )( void *                 pUserData,
                                                                  size_t                 size,
                                                                  InternalAllocationType allocationType,
                                                                  SystemAllocationScope  allocationScope );

  typedef void( VKAPI_PTR * PFN_InternalFreeNotification )( void *                 pUserData,
                                                            size_t                 size,
                                                            InternalAllocationType allocationType,
                                                            SystemAllocationScope  allocationScope );

  // wrapper struct for struct VkAllocationCallbacks, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAllocationCallbacks.html
  struct AllocationCallbacks
  {
    // Alias of the layout-compatible C struct; enables the reinterpret_casts below.
    using NativeType = VkAllocationCallbacks;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AllocationCallbacks( void *                             pUserData_             = {},
                                              PFN_AllocationFunction             pfnAllocation_         = {},
                                              PFN_ReallocationFunction           pfnReallocation_       = {},
                                              PFN_FreeFunction                   pfnFree_               = {},
                                              PFN_InternalAllocationNotification pfnInternalAllocation_ = {},
                                              PFN_InternalFreeNotification       pfnInternalFree_       = {} ) VULKAN_HPP_NOEXCEPT
      : pUserData{ pUserData_ }
      , pfnAllocation{ pfnAllocation_ }
      , pfnReallocation{ pfnReallocation_ }
      , pfnFree{ pfnFree_ }
      , pfnInternalAllocation{ pfnInternalAllocation_ }
      , pfnInternalFree{ pfnInternalFree_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AllocationCallbacks( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the wrapper (identical layout).
    AllocationCallbacks( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT : AllocationCallbacks( *reinterpret_cast<AllocationCallbacks const *>( &rhs ) )
    {
    }

    // Deprecated convenience constructor taking the C function-pointer types (PFN_vk*);
    // it casts them to the vk-namespace pointer types. The pragmas silence the
    // -Wcast-function-type warning those casts would otherwise trigger on clang/gcc.
#  if defined( __clang__ ) || defined( __GNUC__ )
#    pragma GCC diagnostic push
#    if defined( __clang__ )
#      pragma clang diagnostic ignored "-Wunknown-warning-option"
#    endif
#    pragma GCC diagnostic ignored "-Wcast-function-type"
#  endif
    VULKAN_HPP_DEPRECATED( "This constructor is deprecated. Use the one taking function pointer types from the vk-namespace instead." )

    AllocationCallbacks( void *                               pUserData_,
                         PFN_vkAllocationFunction             pfnAllocation_,
                         PFN_vkReallocationFunction           pfnReallocation_       = {},
                         PFN_vkFreeFunction                   pfnFree_               = {},
                         PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {},
                         PFN_vkInternalFreeNotification       pfnInternalFree_       = {} ) VULKAN_HPP_NOEXCEPT
      : AllocationCallbacks( pUserData_,
                             reinterpret_cast<PFN_AllocationFunction>( pfnAllocation_ ),
                             reinterpret_cast<PFN_ReallocationFunction>( pfnReallocation_ ),
                             reinterpret_cast<PFN_FreeFunction>( pfnFree_ ),
                             reinterpret_cast<PFN_InternalAllocationNotification>( pfnInternalAllocation_ ),
                             reinterpret_cast<PFN_InternalFreeNotification>( pfnInternalFree_ ) )
    {
    }
#  if defined( __clang__ ) || defined( __GNUC__ )
#    pragma GCC diagnostic pop
#  endif

    AllocationCallbacks & operator=( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the layout-compatible reinterpret_cast.
    AllocationCallbacks & operator=( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AllocationCallbacks const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (builder-style) setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
    {
      pUserData = pUserData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnAllocation( PFN_AllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT
    {
      pfnAllocation = pfnAllocation_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnReallocation( PFN_ReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT
    {
      pfnReallocation = pfnReallocation_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnFree( PFN_FreeFunction pfnFree_ ) VULKAN_HPP_NOEXCEPT
    {
      pfnFree = pfnFree_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnInternalAllocation( PFN_InternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT
    {
      pfnInternalAllocation = pfnInternalAllocation_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnInternalFree( PFN_InternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT
    {
      pfnInternalFree = pfnInternalFree_;
      return *this;
    }

    // Deprecated setter overloads for the C function-pointer types; same cast and
    // warning-suppression pattern as the deprecated constructor above.
#  if defined( __clang__ ) || defined( __GNUC__ )
#    pragma GCC diagnostic push
#    if defined( __clang__ )
#      pragma clang diagnostic ignored "-Wunknown-warning-option"
#    endif
#    pragma GCC diagnostic ignored "-Wcast-function-type"
#  endif
    VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." )

    AllocationCallbacks & setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT
    {
      return setPfnAllocation( reinterpret_cast<PFN_AllocationFunction>( pfnAllocation_ ) );
    }

    VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." )

    AllocationCallbacks & setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT
    {
      return setPfnReallocation( reinterpret_cast<PFN_ReallocationFunction>( pfnReallocation_ ) );
    }

    VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." )

    AllocationCallbacks & setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT
    {
      return setPfnInternalAllocation( reinterpret_cast<PFN_InternalAllocationNotification>( pfnInternalAllocation_ ) );
    }

    VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." )

    AllocationCallbacks & setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT
    {
      return setPfnInternalFree( reinterpret_cast<PFN_InternalFreeNotification>( pfnInternalFree_ ) );
    }
#  if defined( __clang__ ) || defined( __GNUC__ )
#    pragma GCC diagnostic pop
#  endif
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer, for passing to the C API.
    operator VkAllocationCallbacks const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAllocationCallbacks *>( this );
    }

    operator VkAllocationCallbacks &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAllocationCallbacks *>( this );
    }

    operator VkAllocationCallbacks const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAllocationCallbacks *>( this );
    }

    operator VkAllocationCallbacks *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAllocationCallbacks *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for reflection-based comparison.
    std::tuple<void * const &,
               PFN_AllocationFunction const &,
               PFN_ReallocationFunction const &,
               PFN_FreeFunction const &,
               PFN_InternalAllocationNotification const &,
               PFN_InternalFreeNotification const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( pUserData, pfnAllocation, pfnReallocation, pfnFree, pfnInternalAllocation, pfnInternalFree );
    }
#endif

    // Memberwise equality; pUserData and the callbacks are compared by pointer identity.
    bool operator==( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( pUserData == rhs.pUserData ) && ( pfnAllocation == rhs.pfnAllocation ) && ( pfnReallocation == rhs.pfnReallocation ) &&
             ( pfnFree == rhs.pfnFree ) && ( pfnInternalAllocation == rhs.pfnInternalAllocation ) && ( pfnInternalFree == rhs.pfnInternalFree );
#endif
    }

    bool operator!=( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    void *                             pUserData             = {};
    PFN_AllocationFunction             pfnAllocation         = {};
    PFN_ReallocationFunction           pfnReallocation       = {};
    PFN_FreeFunction                   pfnFree               = {};
    PFN_InternalAllocationNotification pfnInternalAllocation = {};
    PFN_InternalFreeNotification       pfnInternalFree       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper, for generic type lookups.
  template <>
  struct CppType<VkAllocationCallbacks>
  {
    using Type = AllocationCallbacks;
  };
#endif

  // wrapper struct for struct VkAmigoProfilingSubmitInfoSEC, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAmigoProfilingSubmitInfoSEC.html
  struct AmigoProfilingSubmitInfoSEC
  {
    // Alias of the layout-compatible C struct; enables the reinterpret_casts below.
    using NativeType = VkAmigoProfilingSubmitInfoSEC;

    // This struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAmigoProfilingSubmitInfoSEC;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      AmigoProfilingSubmitInfoSEC( uint64_t firstDrawTimestamp_ = {}, uint64_t swapBufferTimestamp_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , firstDrawTimestamp{ firstDrawTimestamp_ }
      , swapBufferTimestamp{ swapBufferTimestamp_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AmigoProfilingSubmitInfoSEC( AmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the wrapper (identical layout).
    AmigoProfilingSubmitInfoSEC( VkAmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT
      : AmigoProfilingSubmitInfoSEC( *reinterpret_cast<AmigoProfilingSubmitInfoSEC const *>( &rhs ) )
    {
    }

    AmigoProfilingSubmitInfoSEC & operator=( AmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the layout-compatible reinterpret_cast.
    AmigoProfilingSubmitInfoSEC & operator=( VkAmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AmigoProfilingSubmitInfoSEC const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (builder-style) setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 AmigoProfilingSubmitInfoSEC & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AmigoProfilingSubmitInfoSEC & setFirstDrawTimestamp( uint64_t firstDrawTimestamp_ ) VULKAN_HPP_NOEXCEPT
    {
      firstDrawTimestamp = firstDrawTimestamp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AmigoProfilingSubmitInfoSEC & setSwapBufferTimestamp( uint64_t swapBufferTimestamp_ ) VULKAN_HPP_NOEXCEPT
    {
      swapBufferTimestamp = swapBufferTimestamp_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer, for passing to the C API.
    operator VkAmigoProfilingSubmitInfoSEC const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAmigoProfilingSubmitInfoSEC *>( this );
    }

    operator VkAmigoProfilingSubmitInfoSEC &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAmigoProfilingSubmitInfoSEC *>( this );
    }

    operator VkAmigoProfilingSubmitInfoSEC const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAmigoProfilingSubmitInfoSEC *>( this );
    }

    operator VkAmigoProfilingSubmitInfoSEC *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAmigoProfilingSubmitInfoSEC *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, uint64_t const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, firstDrawTimestamp, swapBufferTimestamp );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AmigoProfilingSubmitInfoSEC const & ) const = default;
#else
    bool operator==( AmigoProfilingSubmitInfoSEC const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( firstDrawTimestamp == rhs.firstDrawTimestamp ) &&
             ( swapBufferTimestamp == rhs.swapBufferTimestamp );
#  endif
    }

    bool operator!=( AmigoProfilingSubmitInfoSEC const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType               = StructureType::eAmigoProfilingSubmitInfoSEC;  // must keep this value for the C API
    const void *  pNext               = {};
    uint64_t      firstDrawTimestamp  = {};
    uint64_t      swapBufferTimestamp = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper, for generic type lookups.
  template <>
  struct CppType<VkAmigoProfilingSubmitInfoSEC>
  {
    using Type = AmigoProfilingSubmitInfoSEC;
  };
#endif

  // Maps the StructureType enum value to the corresponding C++ struct type.
  template <>
  struct CppType<StructureType, StructureType::eAmigoProfilingSubmitInfoSEC>
  {
    using Type = AmigoProfilingSubmitInfoSEC;
  };

  // wrapper struct for struct VkComponentMapping, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkComponentMapping.html
  struct ComponentMapping
  {
    // Alias of the layout-compatible C struct; enables the reinterpret_casts below.
    // Note: no sType/pNext here — VkComponentMapping is a plain sub-struct, not a chainable one.
    using NativeType = VkComponentMapping;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // All components default to identity swizzle, matching the C API's zero value.
    VULKAN_HPP_CONSTEXPR ComponentMapping( ComponentSwizzle r_ = ComponentSwizzle::eIdentity,
                                           ComponentSwizzle g_ = ComponentSwizzle::eIdentity,
                                           ComponentSwizzle b_ = ComponentSwizzle::eIdentity,
                                           ComponentSwizzle a_ = ComponentSwizzle::eIdentity ) VULKAN_HPP_NOEXCEPT
      : r{ r_ }
      , g{ g_ }
      , b{ b_ }
      , a{ a_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ComponentMapping( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the wrapper (identical layout).
    ComponentMapping( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT : ComponentMapping( *reinterpret_cast<ComponentMapping const *>( &rhs ) ) {}

    ComponentMapping & operator=( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the layout-compatible reinterpret_cast.
    ComponentMapping & operator=( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ComponentMapping const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (builder-style) setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setR( ComponentSwizzle r_ ) VULKAN_HPP_NOEXCEPT
    {
      r = r_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setG( ComponentSwizzle g_ ) VULKAN_HPP_NOEXCEPT
    {
      g = g_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setB( ComponentSwizzle b_ ) VULKAN_HPP_NOEXCEPT
    {
      b = b_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setA( ComponentSwizzle a_ ) VULKAN_HPP_NOEXCEPT
    {
      a = a_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer, for passing to the C API.
    operator VkComponentMapping const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkComponentMapping *>( this );
    }

    operator VkComponentMapping &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkComponentMapping *>( this );
    }

    operator VkComponentMapping const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkComponentMapping *>( this );
    }

    operator VkComponentMapping *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkComponentMapping *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for reflection-based comparison.
    std::tuple<ComponentSwizzle const &, ComponentSwizzle const &, ComponentSwizzle const &, ComponentSwizzle const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( r, g, b, a );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ComponentMapping const & ) const = default;
#else
    bool operator==( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( r == rhs.r ) && ( g == rhs.g ) && ( b == rhs.b ) && ( a == rhs.a );
#  endif
    }

    bool operator!=( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    ComponentSwizzle r = ComponentSwizzle::eIdentity;
    ComponentSwizzle g = ComponentSwizzle::eIdentity;
    ComponentSwizzle b = ComponentSwizzle::eIdentity;
    ComponentSwizzle a = ComponentSwizzle::eIdentity;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper, for generic type lookups.
  template <>
  struct CppType<VkComponentMapping>
  {
    using Type = ComponentMapping;
  };
#endif

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  // wrapper struct for struct VkAndroidHardwareBufferFormatProperties2ANDROID, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAndroidHardwareBufferFormatProperties2ANDROID.html
  // NOTE(review): no setters are generated for this struct, so it is presumably a
  // returned-only (output) structure — confirm against the Vulkan specification.
  struct AndroidHardwareBufferFormatProperties2ANDROID
  {
    // The equivalent C API type this wrapper is layout-compatible with.
    using NativeType = VkAndroidHardwareBufferFormatProperties2ANDROID;

    // pNext-chain metadata used by vulkan.hpp's structure-chain machinery.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAndroidHardwareBufferFormatProperties2ANDROID;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its in-class initializer below.
    VULKAN_HPP_CONSTEXPR
      AndroidHardwareBufferFormatProperties2ANDROID( Format                      format_                           = Format::eUndefined,
                                                     uint64_t                    externalFormat_                   = {},
                                                     FormatFeatureFlags2         formatFeatures_                   = {},
                                                     ComponentMapping            samplerYcbcrConversionComponents_ = {},
                                                     SamplerYcbcrModelConversion suggestedYcbcrModel_              = SamplerYcbcrModelConversion::eRgbIdentity,
                                                     SamplerYcbcrRange           suggestedYcbcrRange_              = SamplerYcbcrRange::eItuFull,
                                                     ChromaLocation              suggestedXChromaOffset_           = ChromaLocation::eCositedEven,
                                                     ChromaLocation              suggestedYChromaOffset_           = ChromaLocation::eCositedEven,
                                                     void *                      pNext_                            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , format{ format_ }
      , externalFormat{ externalFormat_ }
      , formatFeatures{ formatFeatures_ }
      , samplerYcbcrConversionComponents{ samplerYcbcrConversionComponents_ }
      , suggestedYcbcrModel{ suggestedYcbcrModel_ }
      , suggestedYcbcrRange{ suggestedYcbcrRange_ }
      , suggestedXChromaOffset{ suggestedXChromaOffset_ }
      , suggestedYChromaOffset{ suggestedYChromaOffset_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      AndroidHardwareBufferFormatProperties2ANDROID( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-identical
    // to VkAndroidHardwareBufferFormatProperties2ANDROID.
    AndroidHardwareBufferFormatProperties2ANDROID( VkAndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
      : AndroidHardwareBufferFormatProperties2ANDROID( *reinterpret_cast<AndroidHardwareBufferFormatProperties2ANDROID const *>( &rhs ) )
    {
    }

    AndroidHardwareBufferFormatProperties2ANDROID & operator=( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility note as above).
    AndroidHardwareBufferFormatProperties2ANDROID & operator=( VkAndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AndroidHardwareBufferFormatProperties2ANDROID const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native type, so the wrapper can be passed directly
    // to the C API by reference or by pointer.
    operator VkAndroidHardwareBufferFormatProperties2ANDROID const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAndroidHardwareBufferFormatProperties2ANDROID *>( this );
    }

    operator VkAndroidHardwareBufferFormatProperties2ANDROID &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAndroidHardwareBufferFormatProperties2ANDROID *>( this );
    }

    operator VkAndroidHardwareBufferFormatProperties2ANDROID const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAndroidHardwareBufferFormatProperties2ANDROID *>( this );
    }

    operator VkAndroidHardwareBufferFormatProperties2ANDROID *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAndroidHardwareBufferFormatProperties2ANDROID *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members (in declaration order) for generic code.
    std::tuple<StructureType const &,
               void * const &,
               Format const &,
               uint64_t const &,
               FormatFeatureFlags2 const &,
               ComponentMapping const &,
               SamplerYcbcrModelConversion const &,
               SamplerYcbcrRange const &,
               ChromaLocation const &,
               ChromaLocation const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       format,
                       externalFormat,
                       formatFeatures,
                       samplerYcbcrConversionComponents,
                       suggestedYcbcrModel,
                       suggestedYcbcrRange,
                       suggestedXChromaOffset,
                       suggestedYChromaOffset );
    }
#  endif

    // Memberwise comparison: defaulted <=> when available, otherwise manual ==/!=.
#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AndroidHardwareBufferFormatProperties2ANDROID const & ) const = default;
#  else
    bool operator==( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( externalFormat == rhs.externalFormat ) &&
             ( formatFeatures == rhs.formatFeatures ) && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) &&
             ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) &&
             ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
#    endif
    }

    bool operator!=( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror the native struct's declaration order; do not reorder — the
    // reinterpret_casts above depend on the layouts matching.
    StructureType               sType                            = StructureType::eAndroidHardwareBufferFormatProperties2ANDROID;
    void *                      pNext                            = {};
    Format                      format                           = Format::eUndefined;
    uint64_t                    externalFormat                   = {};
    FormatFeatureFlags2         formatFeatures                   = {};
    ComponentMapping            samplerYcbcrConversionComponents = {};
    SamplerYcbcrModelConversion suggestedYcbcrModel              = SamplerYcbcrModelConversion::eRgbIdentity;
    SamplerYcbcrRange           suggestedYcbcrRange              = SamplerYcbcrRange::eItuFull;
    ChromaLocation              suggestedXChromaOffset           = ChromaLocation::eCositedEven;
    ChromaLocation              suggestedYChromaOffset           = ChromaLocation::eCositedEven;
  };

  // Maps the native C type back to this C++ wrapper (C++20 and up).
#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkAndroidHardwareBufferFormatProperties2ANDROID>
  {
    using Type = AndroidHardwareBufferFormatProperties2ANDROID;
  };
#  endif

  // Maps the sType enumerant back to this C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::eAndroidHardwareBufferFormatProperties2ANDROID>
  {
    using Type = AndroidHardwareBufferFormatProperties2ANDROID;
  };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  // wrapper struct for struct VkAndroidHardwareBufferFormatPropertiesANDROID, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAndroidHardwareBufferFormatPropertiesANDROID.html
  // NOTE(review): no setters are generated for this struct, so it is presumably a
  // returned-only (output) structure — confirm against the Vulkan specification.
  struct AndroidHardwareBufferFormatPropertiesANDROID
  {
    // The equivalent C API type this wrapper is layout-compatible with.
    using NativeType = VkAndroidHardwareBufferFormatPropertiesANDROID;

    // pNext-chain metadata used by vulkan.hpp's structure-chain machinery.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its in-class initializer below. Differs from
    // the ...2ANDROID variant only in using FormatFeatureFlags (not FormatFeatureFlags2).
    VULKAN_HPP_CONSTEXPR
      AndroidHardwareBufferFormatPropertiesANDROID( Format                      format_                           = Format::eUndefined,
                                                    uint64_t                    externalFormat_                   = {},
                                                    FormatFeatureFlags          formatFeatures_                   = {},
                                                    ComponentMapping            samplerYcbcrConversionComponents_ = {},
                                                    SamplerYcbcrModelConversion suggestedYcbcrModel_              = SamplerYcbcrModelConversion::eRgbIdentity,
                                                    SamplerYcbcrRange           suggestedYcbcrRange_              = SamplerYcbcrRange::eItuFull,
                                                    ChromaLocation              suggestedXChromaOffset_           = ChromaLocation::eCositedEven,
                                                    ChromaLocation              suggestedYChromaOffset_           = ChromaLocation::eCositedEven,
                                                    void *                      pNext_                            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , format{ format_ }
      , externalFormat{ externalFormat_ }
      , formatFeatures{ formatFeatures_ }
      , samplerYcbcrConversionComponents{ samplerYcbcrConversionComponents_ }
      , suggestedYcbcrModel{ suggestedYcbcrModel_ }
      , suggestedYcbcrRange{ suggestedYcbcrRange_ }
      , suggestedXChromaOffset{ suggestedXChromaOffset_ }
      , suggestedYChromaOffset{ suggestedYChromaOffset_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-identical
    // to VkAndroidHardwareBufferFormatPropertiesANDROID.
    AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
      : AndroidHardwareBufferFormatPropertiesANDROID( *reinterpret_cast<AndroidHardwareBufferFormatPropertiesANDROID const *>( &rhs ) )
    {
    }

    AndroidHardwareBufferFormatPropertiesANDROID & operator=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility note as above).
    AndroidHardwareBufferFormatPropertiesANDROID & operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AndroidHardwareBufferFormatPropertiesANDROID const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native type, so the wrapper can be passed directly
    // to the C API by reference or by pointer.
    operator VkAndroidHardwareBufferFormatPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAndroidHardwareBufferFormatPropertiesANDROID *>( this );
    }

    operator VkAndroidHardwareBufferFormatPropertiesANDROID &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAndroidHardwareBufferFormatPropertiesANDROID *>( this );
    }

    operator VkAndroidHardwareBufferFormatPropertiesANDROID const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAndroidHardwareBufferFormatPropertiesANDROID *>( this );
    }

    operator VkAndroidHardwareBufferFormatPropertiesANDROID *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAndroidHardwareBufferFormatPropertiesANDROID *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members (in declaration order) for generic code.
    std::tuple<StructureType const &,
               void * const &,
               Format const &,
               uint64_t const &,
               FormatFeatureFlags const &,
               ComponentMapping const &,
               SamplerYcbcrModelConversion const &,
               SamplerYcbcrRange const &,
               ChromaLocation const &,
               ChromaLocation const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       format,
                       externalFormat,
                       formatFeatures,
                       samplerYcbcrConversionComponents,
                       suggestedYcbcrModel,
                       suggestedYcbcrRange,
                       suggestedXChromaOffset,
                       suggestedYChromaOffset );
    }
#  endif

    // Memberwise comparison: defaulted <=> when available, otherwise manual ==/!=.
#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AndroidHardwareBufferFormatPropertiesANDROID const & ) const = default;
#  else
    bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( externalFormat == rhs.externalFormat ) &&
             ( formatFeatures == rhs.formatFeatures ) && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) &&
             ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) &&
             ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
#    endif
    }

    bool operator!=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror the native struct's declaration order; do not reorder — the
    // reinterpret_casts above depend on the layouts matching.
    StructureType               sType                            = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
    void *                      pNext                            = {};
    Format                      format                           = Format::eUndefined;
    uint64_t                    externalFormat                   = {};
    FormatFeatureFlags          formatFeatures                   = {};
    ComponentMapping            samplerYcbcrConversionComponents = {};
    SamplerYcbcrModelConversion suggestedYcbcrModel              = SamplerYcbcrModelConversion::eRgbIdentity;
    SamplerYcbcrRange           suggestedYcbcrRange              = SamplerYcbcrRange::eItuFull;
    ChromaLocation              suggestedXChromaOffset           = ChromaLocation::eCositedEven;
    ChromaLocation              suggestedYChromaOffset           = ChromaLocation::eCositedEven;
  };

  // Maps the native C type back to this C++ wrapper (C++20 and up).
#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkAndroidHardwareBufferFormatPropertiesANDROID>
  {
    using Type = AndroidHardwareBufferFormatPropertiesANDROID;
  };
#  endif

  // Maps the sType enumerant back to this C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::eAndroidHardwareBufferFormatPropertiesANDROID>
  {
    using Type = AndroidHardwareBufferFormatPropertiesANDROID;
  };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  // wrapper struct for struct VkAndroidHardwareBufferFormatResolvePropertiesANDROID, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAndroidHardwareBufferFormatResolvePropertiesANDROID.html
  // NOTE(review): no setters are generated for this struct, so it is presumably a
  // returned-only (output) structure — confirm against the Vulkan specification.
  struct AndroidHardwareBufferFormatResolvePropertiesANDROID
  {
    // The equivalent C API type this wrapper is layout-compatible with.
    using NativeType = VkAndroidHardwareBufferFormatResolvePropertiesANDROID;

    // pNext-chain metadata used by vulkan.hpp's structure-chain machinery.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAndroidHardwareBufferFormatResolvePropertiesANDROID;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its in-class initializer below.
    VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatResolvePropertiesANDROID( Format colorAttachmentFormat_ = Format::eUndefined,
                                                                              void * pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , colorAttachmentFormat{ colorAttachmentFormat_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      AndroidHardwareBufferFormatResolvePropertiesANDROID( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-identical
    // to VkAndroidHardwareBufferFormatResolvePropertiesANDROID.
    AndroidHardwareBufferFormatResolvePropertiesANDROID( VkAndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
      : AndroidHardwareBufferFormatResolvePropertiesANDROID( *reinterpret_cast<AndroidHardwareBufferFormatResolvePropertiesANDROID const *>( &rhs ) )
    {
    }

    AndroidHardwareBufferFormatResolvePropertiesANDROID &
      operator=( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility note as above).
    AndroidHardwareBufferFormatResolvePropertiesANDROID & operator=( VkAndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AndroidHardwareBufferFormatResolvePropertiesANDROID const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native type, so the wrapper can be passed directly
    // to the C API by reference or by pointer.
    operator VkAndroidHardwareBufferFormatResolvePropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAndroidHardwareBufferFormatResolvePropertiesANDROID *>( this );
    }

    operator VkAndroidHardwareBufferFormatResolvePropertiesANDROID &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAndroidHardwareBufferFormatResolvePropertiesANDROID *>( this );
    }

    operator VkAndroidHardwareBufferFormatResolvePropertiesANDROID const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAndroidHardwareBufferFormatResolvePropertiesANDROID *>( this );
    }

    operator VkAndroidHardwareBufferFormatResolvePropertiesANDROID *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAndroidHardwareBufferFormatResolvePropertiesANDROID *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members (in declaration order) for generic code.
    std::tuple<StructureType const &, void * const &, Format const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, colorAttachmentFormat );
    }
#  endif

    // Memberwise comparison: defaulted <=> when available, otherwise manual ==/!=.
#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AndroidHardwareBufferFormatResolvePropertiesANDROID const & ) const = default;
#  else
    bool operator==( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorAttachmentFormat == rhs.colorAttachmentFormat );
#    endif
    }

    bool operator!=( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror the native struct's declaration order; do not reorder — the
    // reinterpret_casts above depend on the layouts matching.
    StructureType sType                 = StructureType::eAndroidHardwareBufferFormatResolvePropertiesANDROID;
    void *        pNext                 = {};
    Format        colorAttachmentFormat = Format::eUndefined;
  };

  // Maps the native C type back to this C++ wrapper (C++20 and up).
#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkAndroidHardwareBufferFormatResolvePropertiesANDROID>
  {
    using Type = AndroidHardwareBufferFormatResolvePropertiesANDROID;
  };
#  endif

  // Maps the sType enumerant back to this C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::eAndroidHardwareBufferFormatResolvePropertiesANDROID>
  {
    using Type = AndroidHardwareBufferFormatResolvePropertiesANDROID;
  };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  // wrapper struct for struct VkAndroidHardwareBufferPropertiesANDROID, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAndroidHardwareBufferPropertiesANDROID.html
  // NOTE(review): no setters are generated for this struct, so it is presumably a
  // returned-only (output) structure — confirm against the Vulkan specification.
  struct AndroidHardwareBufferPropertiesANDROID
  {
    // The equivalent C API type this wrapper is layout-compatible with.
    using NativeType = VkAndroidHardwareBufferPropertiesANDROID;

    // pNext-chain metadata used by vulkan.hpp's structure-chain machinery.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAndroidHardwareBufferPropertiesANDROID;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its in-class initializer below.
    VULKAN_HPP_CONSTEXPR
      AndroidHardwareBufferPropertiesANDROID( DeviceSize allocationSize_ = {}, uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , allocationSize{ allocationSize_ }
      , memoryTypeBits{ memoryTypeBits_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-identical
    // to VkAndroidHardwareBufferPropertiesANDROID.
    AndroidHardwareBufferPropertiesANDROID( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
      : AndroidHardwareBufferPropertiesANDROID( *reinterpret_cast<AndroidHardwareBufferPropertiesANDROID const *>( &rhs ) )
    {
    }

    AndroidHardwareBufferPropertiesANDROID & operator=( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility note as above).
    AndroidHardwareBufferPropertiesANDROID & operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AndroidHardwareBufferPropertiesANDROID const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native type, so the wrapper can be passed directly
    // to the C API by reference or by pointer.
    operator VkAndroidHardwareBufferPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAndroidHardwareBufferPropertiesANDROID *>( this );
    }

    operator VkAndroidHardwareBufferPropertiesANDROID &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( this );
    }

    operator VkAndroidHardwareBufferPropertiesANDROID const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAndroidHardwareBufferPropertiesANDROID *>( this );
    }

    operator VkAndroidHardwareBufferPropertiesANDROID *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members (in declaration order) for generic code.
    std::tuple<StructureType const &, void * const &, DeviceSize const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, allocationSize, memoryTypeBits );
    }
#  endif

    // Memberwise comparison: defaulted <=> when available, otherwise manual ==/!=.
#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AndroidHardwareBufferPropertiesANDROID const & ) const = default;
#  else
    bool operator==( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && ( memoryTypeBits == rhs.memoryTypeBits );
#    endif
    }

    bool operator!=( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror the native struct's declaration order; do not reorder — the
    // reinterpret_casts above depend on the layouts matching.
    StructureType sType          = StructureType::eAndroidHardwareBufferPropertiesANDROID;
    void *        pNext          = {};
    DeviceSize    allocationSize = {};
    uint32_t      memoryTypeBits = {};
  };

  // Maps the native C type back to this C++ wrapper (C++20 and up).
#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkAndroidHardwareBufferPropertiesANDROID>
  {
    using Type = AndroidHardwareBufferPropertiesANDROID;
  };
#  endif

  // Maps the sType enumerant back to this C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::eAndroidHardwareBufferPropertiesANDROID>
  {
    using Type = AndroidHardwareBufferPropertiesANDROID;
  };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  // wrapper struct for struct VkAndroidHardwareBufferUsageANDROID, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAndroidHardwareBufferUsageANDROID.html
  // NOTE(review): no setters are generated for this struct, so it is presumably a
  // returned-only (output) structure — confirm against the Vulkan specification.
  struct AndroidHardwareBufferUsageANDROID
  {
    // The equivalent C API type this wrapper is layout-compatible with.
    using NativeType = VkAndroidHardwareBufferUsageANDROID;

    // pNext-chain metadata used by vulkan.hpp's structure-chain machinery.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAndroidHardwareBufferUsageANDROID;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its in-class initializer below.
    VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID( uint64_t androidHardwareBufferUsage_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , androidHardwareBufferUsage{ androidHardwareBufferUsage_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-identical
    // to VkAndroidHardwareBufferUsageANDROID.
    AndroidHardwareBufferUsageANDROID( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
      : AndroidHardwareBufferUsageANDROID( *reinterpret_cast<AndroidHardwareBufferUsageANDROID const *>( &rhs ) )
    {
    }

    AndroidHardwareBufferUsageANDROID & operator=( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility note as above).
    AndroidHardwareBufferUsageANDROID & operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AndroidHardwareBufferUsageANDROID const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native type, so the wrapper can be passed directly
    // to the C API by reference or by pointer.
    operator VkAndroidHardwareBufferUsageANDROID const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAndroidHardwareBufferUsageANDROID *>( this );
    }

    operator VkAndroidHardwareBufferUsageANDROID &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAndroidHardwareBufferUsageANDROID *>( this );
    }

    operator VkAndroidHardwareBufferUsageANDROID const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAndroidHardwareBufferUsageANDROID *>( this );
    }

    operator VkAndroidHardwareBufferUsageANDROID *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAndroidHardwareBufferUsageANDROID *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members (in declaration order) for generic code.
    std::tuple<StructureType const &, void * const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, androidHardwareBufferUsage );
    }
#  endif

    // Memberwise comparison: defaulted <=> when available, otherwise manual ==/!=.
#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AndroidHardwareBufferUsageANDROID const & ) const = default;
#  else
    bool operator==( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage );
#    endif
    }

    bool operator!=( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror the native struct's declaration order; do not reorder — the
    // reinterpret_casts above depend on the layouts matching.
    StructureType sType                      = StructureType::eAndroidHardwareBufferUsageANDROID;
    void *        pNext                      = {};
    uint64_t      androidHardwareBufferUsage = {};
  };

  // Maps the native C type back to this C++ wrapper (C++20 and up).
#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkAndroidHardwareBufferUsageANDROID>
  {
    using Type = AndroidHardwareBufferUsageANDROID;
  };
#  endif

  // Maps the sType enumerant back to this C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::eAndroidHardwareBufferUsageANDROID>
  {
    using Type = AndroidHardwareBufferUsageANDROID;
  };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  // wrapper struct for struct VkAndroidSurfaceCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAndroidSurfaceCreateInfoKHR.html
  // Input structure (setters are generated), used to describe an Android surface to create.
  struct AndroidSurfaceCreateInfoKHR
  {
    // The equivalent C API type this wrapper is layout-compatible with.
    using NativeType = VkAndroidSurfaceCreateInfoKHR;

    // pNext-chain metadata used by vulkan.hpp's structure-chain machinery.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAndroidSurfaceCreateInfoKHR;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its in-class initializer below.
    // `window_` is a non-owning pointer to the ANativeWindow the surface targets.
    VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateFlagsKHR flags_  = {},
                                                      struct ANativeWindow *       window_ = {},
                                                      const void *                 pNext_  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , window{ window_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-identical
    // to VkAndroidSurfaceCreateInfoKHR.
    AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AndroidSurfaceCreateInfoKHR( *reinterpret_cast<AndroidSurfaceCreateInfoKHR const *>( &rhs ) )
    {
    }

    AndroidSurfaceCreateInfoKHR & operator=( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility note as above).
    AndroidSurfaceCreateInfoKHR & operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AndroidSurfaceCreateInfoKHR const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for call chaining.
    VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setFlags( AndroidSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setWindow( struct ANativeWindow * window_ ) VULKAN_HPP_NOEXCEPT
    {
      window = window_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type, so the wrapper can be passed directly
    // to the C API by reference or by pointer.
    operator VkAndroidSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( this );
    }

    operator VkAndroidSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAndroidSurfaceCreateInfoKHR *>( this );
    }

    operator VkAndroidSurfaceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( this );
    }

    operator VkAndroidSurfaceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAndroidSurfaceCreateInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members (in declaration order) for generic code.
    std::tuple<StructureType const &, const void * const &, AndroidSurfaceCreateFlagsKHR const &, struct ANativeWindow * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, window );
    }
#  endif

    // Memberwise comparison: defaulted <=> when available, otherwise manual ==/!=.
    // Note that `window` is compared by pointer value, not by what it points at.
#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AndroidSurfaceCreateInfoKHR const & ) const = default;
#  else
    bool operator==( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( window == rhs.window );
#    endif
    }

    bool operator!=( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror the native struct's declaration order; do not reorder — the
    // reinterpret_casts above depend on the layouts matching.
    StructureType                sType  = StructureType::eAndroidSurfaceCreateInfoKHR;
    const void *                 pNext  = {};
    AndroidSurfaceCreateFlagsKHR flags  = {};
    struct ANativeWindow *       window = {};
  };

  // Maps the native C type back to this C++ wrapper (C++20 and up).
#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkAndroidSurfaceCreateInfoKHR>
  {
    using Type = AndroidSurfaceCreateInfoKHR;
  };
#  endif

  // Maps the sType enumerant back to this C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::eAndroidSurfaceCreateInfoKHR>
  {
    using Type = AndroidSurfaceCreateInfoKHR;
  };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/

  // wrapper struct for struct VkAntiLagPresentationInfoAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAntiLagPresentationInfoAMD.html
  struct AntiLagPresentationInfoAMD
  {
    using NativeType = VkAntiLagPresentationInfoAMD;

    // pNext-chain metadata: this struct may appear at most once in a chain, and its sType is fixed
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAntiLagPresentationInfoAMD;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is set by its member initializer; only pNext and the payload members are taken here
    VULKAN_HPP_CONSTEXPR
      AntiLagPresentationInfoAMD( AntiLagStageAMD stage_ = AntiLagStageAMD::eInput, uint64_t frameIndex_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stage{ stage_ }
      , frameIndex{ frameIndex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AntiLagPresentationInfoAMD( AntiLagPresentationInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct; relies on the wrapper being layout-identical to VkAntiLagPresentationInfoAMD
    AntiLagPresentationInfoAMD( VkAntiLagPresentationInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : AntiLagPresentationInfoAMD( *reinterpret_cast<AntiLagPresentationInfoAMD const *>( &rhs ) )
    {
    }

    AntiLagPresentationInfoAMD & operator=( AntiLagPresentationInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct (same layout-compatibility requirement as the converting constructor)
    AntiLagPresentationInfoAMD & operator=( VkAntiLagPresentationInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AntiLagPresentationInfoAMD const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 AntiLagPresentationInfoAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AntiLagPresentationInfoAMD & setStage( AntiLagStageAMD stage_ ) VULKAN_HPP_NOEXCEPT
    {
      stage = stage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AntiLagPresentationInfoAMD & setFrameIndex( uint64_t frameIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      frameIndex = frameIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native C struct, by reference and by pointer
    operator VkAntiLagPresentationInfoAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAntiLagPresentationInfoAMD *>( this );
    }

    operator VkAntiLagPresentationInfoAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAntiLagPresentationInfoAMD *>( this );
    }

    operator VkAntiLagPresentationInfoAMD const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAntiLagPresentationInfoAMD *>( this );
    }

    operator VkAntiLagPresentationInfoAMD *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAntiLagPresentationInfoAMD *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references over all members, used by the reflection-based operator==
    std::tuple<StructureType const &, void * const &, AntiLagStageAMD const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stage, frameIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AntiLagPresentationInfoAMD const & ) const = default;
#else
    // without <=>, fall back to member-wise equality (pNext is compared by pointer value only)
    bool operator==( AntiLagPresentationInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && ( frameIndex == rhs.frameIndex );
#  endif
    }

    bool operator!=( AntiLagPresentationInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members mirror VkAntiLagPresentationInfoAMD exactly (order and types) — do not reorder
    StructureType   sType      = StructureType::eAntiLagPresentationInfoAMD;
    void *          pNext      = {};
    AntiLagStageAMD stage      = AntiLagStageAMD::eInput;
    uint64_t        frameIndex = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type back to its C++ wrapper (used by generic code, C++20 builds only)
  template <>
  struct CppType<VkAntiLagPresentationInfoAMD>
  {
    using Type = AntiLagPresentationInfoAMD;
  };
#endif

  // maps the sType enumerant to the wrapper type, e.g. for pNext-chain validation
  template <>
  struct CppType<StructureType, StructureType::eAntiLagPresentationInfoAMD>
  {
    using Type = AntiLagPresentationInfoAMD;
  };

  // wrapper struct for struct VkAntiLagDataAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAntiLagDataAMD.html
  struct AntiLagDataAMD
  {
    using NativeType = VkAntiLagDataAMD;

    // pNext-chain metadata: this struct may appear at most once in a chain, and its sType is fixed
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAntiLagDataAMD;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is set by its member initializer; only pNext and the payload members are taken here
    VULKAN_HPP_CONSTEXPR AntiLagDataAMD( AntiLagModeAMD                     mode_              = AntiLagModeAMD::eDriverControl,
                                         uint32_t                           maxFPS_            = {},
                                         const AntiLagPresentationInfoAMD * pPresentationInfo_ = {},
                                         const void *                       pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , mode{ mode_ }
      , maxFPS{ maxFPS_ }
      , pPresentationInfo{ pPresentationInfo_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AntiLagDataAMD( AntiLagDataAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct; relies on the wrapper being layout-identical to VkAntiLagDataAMD
    AntiLagDataAMD( VkAntiLagDataAMD const & rhs ) VULKAN_HPP_NOEXCEPT : AntiLagDataAMD( *reinterpret_cast<AntiLagDataAMD const *>( &rhs ) ) {}

    AntiLagDataAMD & operator=( AntiLagDataAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct (same layout-compatibility requirement as the converting constructor)
    AntiLagDataAMD & operator=( VkAntiLagDataAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AntiLagDataAMD const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD & setMode( AntiLagModeAMD mode_ ) VULKAN_HPP_NOEXCEPT
    {
      mode = mode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD & setMaxFPS( uint32_t maxFPS_ ) VULKAN_HPP_NOEXCEPT
    {
      maxFPS = maxFPS_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD & setPPresentationInfo( const AntiLagPresentationInfoAMD * pPresentationInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pPresentationInfo = pPresentationInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native C struct, by reference and by pointer
    operator VkAntiLagDataAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAntiLagDataAMD *>( this );
    }

    operator VkAntiLagDataAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAntiLagDataAMD *>( this );
    }

    operator VkAntiLagDataAMD const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAntiLagDataAMD *>( this );
    }

    operator VkAntiLagDataAMD *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAntiLagDataAMD *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references over all members, used by the reflection-based operator==
    std::tuple<StructureType const &, const void * const &, AntiLagModeAMD const &, uint32_t const &, const AntiLagPresentationInfoAMD * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, mode, maxFPS, pPresentationInfo );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AntiLagDataAMD const & ) const = default;
#else
    // without <=>, fall back to member-wise equality (pointer members are compared by address, not content)
    bool operator==( AntiLagDataAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mode == rhs.mode ) && ( maxFPS == rhs.maxFPS ) &&
             ( pPresentationInfo == rhs.pPresentationInfo );
#  endif
    }

    bool operator!=( AntiLagDataAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members mirror VkAntiLagDataAMD exactly (order and types) — do not reorder
    StructureType                      sType             = StructureType::eAntiLagDataAMD;
    const void *                       pNext             = {};
    AntiLagModeAMD                     mode              = AntiLagModeAMD::eDriverControl;
    uint32_t                           maxFPS            = {};
    const AntiLagPresentationInfoAMD * pPresentationInfo = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type back to its C++ wrapper (used by generic code, C++20 builds only)
  template <>
  struct CppType<VkAntiLagDataAMD>
  {
    using Type = AntiLagDataAMD;
  };
#endif

  // maps the sType enumerant to the wrapper type, e.g. for pNext-chain validation
  template <>
  struct CppType<StructureType, StructureType::eAntiLagDataAMD>
  {
    using Type = AntiLagDataAMD;
  };

  // wrapper struct for struct VkApplicationInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkApplicationInfo.html
  // Note: pApplicationName and pEngineName may legitimately be nullptr (the Vulkan spec allows it),
  // so the string comparisons below must be nullptr-safe.
  struct ApplicationInfo
  {
    using NativeType = VkApplicationInfo;

    // pNext-chain metadata: this struct may appear at most once in a chain, and its sType is fixed
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eApplicationInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is set by its member initializer; only pNext and the payload members are taken here
    VULKAN_HPP_CONSTEXPR ApplicationInfo( const char * pApplicationName_   = {},
                                          uint32_t     applicationVersion_ = {},
                                          const char * pEngineName_        = {},
                                          uint32_t     engineVersion_      = {},
                                          uint32_t     apiVersion_         = {},
                                          const void * pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pApplicationName{ pApplicationName_ }
      , applicationVersion{ applicationVersion_ }
      , pEngineName{ pEngineName_ }
      , engineVersion{ engineVersion_ }
      , apiVersion{ apiVersion_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ApplicationInfo( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct; relies on the wrapper being layout-identical to VkApplicationInfo
    ApplicationInfo( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ApplicationInfo( *reinterpret_cast<ApplicationInfo const *>( &rhs ) ) {}

    ApplicationInfo & operator=( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct (same layout-compatibility requirement as the converting constructor)
    ApplicationInfo & operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ApplicationInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPApplicationName( const char * pApplicationName_ ) VULKAN_HPP_NOEXCEPT
    {
      pApplicationName = pApplicationName_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setApplicationVersion( uint32_t applicationVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      applicationVersion = applicationVersion_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPEngineName( const char * pEngineName_ ) VULKAN_HPP_NOEXCEPT
    {
      pEngineName = pEngineName_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setEngineVersion( uint32_t engineVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      engineVersion = engineVersion_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setApiVersion( uint32_t apiVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      apiVersion = apiVersion_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native C struct, by reference and by pointer
    operator VkApplicationInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkApplicationInfo *>( this );
    }

    operator VkApplicationInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkApplicationInfo *>( this );
    }

    operator VkApplicationInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkApplicationInfo *>( this );
    }

    operator VkApplicationInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkApplicationInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references over all members; note that a tuple comparison would compare the
    // name pointers by address, not by string content, so operator== below does not use it
    std::tuple<StructureType const &, const void * const &, const char * const &, uint32_t const &, const char * const &, uint32_t const &, uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pApplicationName, applicationVersion, pEngineName, engineVersion, apiVersion );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // member-wise ordering; the two C-string members are ordered by content via strcmp.
    // strcmp is only invoked when both pointers are non-null: Vulkan permits a null
    // pApplicationName/pEngineName, and strcmp( nullptr, ... ) is undefined behavior.
    // A null name orders before any non-null name.
    std::strong_ordering operator<=>( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( pApplicationName != rhs.pApplicationName )
      {
        if ( !pApplicationName || !rhs.pApplicationName )
          return pApplicationName ? std::strong_ordering::greater : std::strong_ordering::less;
        if ( auto cmp = strcmp( pApplicationName, rhs.pApplicationName ); cmp != 0 )
          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      }
      if ( auto cmp = applicationVersion <=> rhs.applicationVersion; cmp != 0 )
        return cmp;
      if ( pEngineName != rhs.pEngineName )
      {
        if ( !pEngineName || !rhs.pEngineName )
          return pEngineName ? std::strong_ordering::greater : std::strong_ordering::less;
        if ( auto cmp = strcmp( pEngineName, rhs.pEngineName ); cmp != 0 )
          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      }
      if ( auto cmp = engineVersion <=> rhs.engineVersion; cmp != 0 )
        return cmp;
      if ( auto cmp = apiVersion <=> rhs.apiVersion; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    // member-wise equality; the name members compare equal when the pointers are identical
    // (including both null) or when both are non-null and the strings match. strcmp is never
    // called with a null pointer (that would be undefined behavior).
    bool operator==( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( ( pApplicationName == rhs.pApplicationName ) ||
               ( pApplicationName && rhs.pApplicationName && ( strcmp( pApplicationName, rhs.pApplicationName ) == 0 ) ) ) &&
             ( applicationVersion == rhs.applicationVersion ) &&
             ( ( pEngineName == rhs.pEngineName ) || ( pEngineName && rhs.pEngineName && ( strcmp( pEngineName, rhs.pEngineName ) == 0 ) ) ) &&
             ( engineVersion == rhs.engineVersion ) && ( apiVersion == rhs.apiVersion );
    }

    bool operator!=( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // members mirror VkApplicationInfo exactly (order and types) — do not reorder
    StructureType sType              = StructureType::eApplicationInfo;
    const void *  pNext              = {};
    const char *  pApplicationName   = {};
    uint32_t      applicationVersion = {};
    const char *  pEngineName        = {};
    uint32_t      engineVersion      = {};
    uint32_t      apiVersion         = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type back to its C++ wrapper (used by generic code, C++20 builds only)
  template <>
  struct CppType<VkApplicationInfo>
  {
    using Type = ApplicationInfo;
  };
#endif

  // maps the sType enumerant to the wrapper type, e.g. for pNext-chain validation
  template <>
  struct CppType<StructureType, StructureType::eApplicationInfo>
  {
    using Type = ApplicationInfo;
  };

  // wrapper struct for struct VkAttachmentDescription, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentDescription.html
  // Note: this struct has no sType/pNext members, so it cannot be part of a pNext chain.
  struct AttachmentDescription
  {
    using NativeType = VkAttachmentDescription;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AttachmentDescription( AttachmentDescriptionFlags flags_          = {},
                                                Format                     format_         = Format::eUndefined,
                                                SampleCountFlagBits        samples_        = SampleCountFlagBits::e1,
                                                AttachmentLoadOp           loadOp_         = AttachmentLoadOp::eLoad,
                                                AttachmentStoreOp          storeOp_        = AttachmentStoreOp::eStore,
                                                AttachmentLoadOp           stencilLoadOp_  = AttachmentLoadOp::eLoad,
                                                AttachmentStoreOp          stencilStoreOp_ = AttachmentStoreOp::eStore,
                                                ImageLayout                initialLayout_  = ImageLayout::eUndefined,
                                                ImageLayout                finalLayout_    = ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
      : flags{ flags_ }
      , format{ format_ }
      , samples{ samples_ }
      , loadOp{ loadOp_ }
      , storeOp{ storeOp_ }
      , stencilLoadOp{ stencilLoadOp_ }
      , stencilStoreOp{ stencilStoreOp_ }
      , initialLayout{ initialLayout_ }
      , finalLayout{ finalLayout_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AttachmentDescription( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct; relies on the wrapper being layout-identical to VkAttachmentDescription
    AttachmentDescription( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
      : AttachmentDescription( *reinterpret_cast<AttachmentDescription const *>( &rhs ) )
    {
    }

    AttachmentDescription & operator=( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct (same layout-compatibility requirement as the converting constructor)
    AttachmentDescription & operator=( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AttachmentDescription const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFlags( AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFormat( Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setSamples( SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
    {
      samples = samples_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setLoadOp( AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
    {
      loadOp = loadOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStoreOp( AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
    {
      storeOp = storeOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStencilLoadOp( AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilLoadOp = stencilLoadOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStencilStoreOp( AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilStoreOp = stencilStoreOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setInitialLayout( ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      initialLayout = initialLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFinalLayout( ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      finalLayout = finalLayout_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native C struct, by reference and by pointer
    operator VkAttachmentDescription const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAttachmentDescription *>( this );
    }

    operator VkAttachmentDescription &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAttachmentDescription *>( this );
    }

    operator VkAttachmentDescription const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAttachmentDescription *>( this );
    }

    operator VkAttachmentDescription *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAttachmentDescription *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references over all members, used by the reflection-based operator==
    std::tuple<AttachmentDescriptionFlags const &,
               Format const &,
               SampleCountFlagBits const &,
               AttachmentLoadOp const &,
               AttachmentStoreOp const &,
               AttachmentLoadOp const &,
               AttachmentStoreOp const &,
               ImageLayout const &,
               ImageLayout const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( flags, format, samples, loadOp, storeOp, stencilLoadOp, stencilStoreOp, initialLayout, finalLayout );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AttachmentDescription const & ) const = default;
#else
    // without <=>, fall back to member-wise equality
    bool operator==( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( flags == rhs.flags ) && ( format == rhs.format ) && ( samples == rhs.samples ) && ( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) &&
             ( stencilLoadOp == rhs.stencilLoadOp ) && ( stencilStoreOp == rhs.stencilStoreOp ) && ( initialLayout == rhs.initialLayout ) &&
             ( finalLayout == rhs.finalLayout );
#  endif
    }

    bool operator!=( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members mirror VkAttachmentDescription exactly (order and types) — do not reorder
    AttachmentDescriptionFlags flags          = {};
    Format                     format         = Format::eUndefined;
    SampleCountFlagBits        samples        = SampleCountFlagBits::e1;
    AttachmentLoadOp           loadOp         = AttachmentLoadOp::eLoad;
    AttachmentStoreOp          storeOp        = AttachmentStoreOp::eStore;
    AttachmentLoadOp           stencilLoadOp  = AttachmentLoadOp::eLoad;
    AttachmentStoreOp          stencilStoreOp = AttachmentStoreOp::eStore;
    ImageLayout                initialLayout  = ImageLayout::eUndefined;
    ImageLayout                finalLayout    = ImageLayout::eUndefined;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type back to its C++ wrapper (used by generic code, C++20 builds only);
  // no StructureType specialization here because VkAttachmentDescription has no sType member
  template <>
  struct CppType<VkAttachmentDescription>
  {
    using Type = AttachmentDescription;
  };
#endif

  // wrapper struct for struct VkAttachmentDescription2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentDescription2.html
  struct AttachmentDescription2
  {
    using NativeType = VkAttachmentDescription2;

    // pNext-chain metadata: this struct may appear at most once in a chain, and its sType is fixed
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAttachmentDescription2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is set by its member initializer; only pNext and the payload members are taken here
    VULKAN_HPP_CONSTEXPR AttachmentDescription2( AttachmentDescriptionFlags flags_          = {},
                                                 Format                     format_         = Format::eUndefined,
                                                 SampleCountFlagBits        samples_        = SampleCountFlagBits::e1,
                                                 AttachmentLoadOp           loadOp_         = AttachmentLoadOp::eLoad,
                                                 AttachmentStoreOp          storeOp_        = AttachmentStoreOp::eStore,
                                                 AttachmentLoadOp           stencilLoadOp_  = AttachmentLoadOp::eLoad,
                                                 AttachmentStoreOp          stencilStoreOp_ = AttachmentStoreOp::eStore,
                                                 ImageLayout                initialLayout_  = ImageLayout::eUndefined,
                                                 ImageLayout                finalLayout_    = ImageLayout::eUndefined,
                                                 const void *               pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , format{ format_ }
      , samples{ samples_ }
      , loadOp{ loadOp_ }
      , storeOp{ storeOp_ }
      , stencilLoadOp{ stencilLoadOp_ }
      , stencilStoreOp{ stencilStoreOp_ }
      , initialLayout{ initialLayout_ }
      , finalLayout{ finalLayout_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AttachmentDescription2( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct; relies on the wrapper being layout-identical to VkAttachmentDescription2
    AttachmentDescription2( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : AttachmentDescription2( *reinterpret_cast<AttachmentDescription2 const *>( &rhs ) )
    {
    }

    AttachmentDescription2 & operator=( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct (same layout-compatibility requirement as the converting constructor)
    AttachmentDescription2 & operator=( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AttachmentDescription2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFlags( AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFormat( Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setSamples( SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
    {
      samples = samples_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setLoadOp( AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
    {
      loadOp = loadOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStoreOp( AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
    {
      storeOp = storeOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStencilLoadOp( AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilLoadOp = stencilLoadOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStencilStoreOp( AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilStoreOp = stencilStoreOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setInitialLayout( ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      initialLayout = initialLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFinalLayout( ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      finalLayout = finalLayout_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native C struct, by reference and by pointer
    operator VkAttachmentDescription2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAttachmentDescription2 *>( this );
    }

    operator VkAttachmentDescription2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAttachmentDescription2 *>( this );
    }

    operator VkAttachmentDescription2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAttachmentDescription2 *>( this );
    }

    operator VkAttachmentDescription2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAttachmentDescription2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references over all members, used by the reflection-based operator==
    std::tuple<StructureType const &,
               const void * const &,
               AttachmentDescriptionFlags const &,
               Format const &,
               SampleCountFlagBits const &,
               AttachmentLoadOp const &,
               AttachmentStoreOp const &,
               AttachmentLoadOp const &,
               AttachmentStoreOp const &,
               ImageLayout const &,
               ImageLayout const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, format, samples, loadOp, storeOp, stencilLoadOp, stencilStoreOp, initialLayout, finalLayout );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AttachmentDescription2 const & ) const = default;
#else
    // without <=>, fall back to member-wise equality (pNext is compared by pointer value only)
    bool operator==( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( format == rhs.format ) && ( samples == rhs.samples ) &&
             ( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) && ( stencilLoadOp == rhs.stencilLoadOp ) && ( stencilStoreOp == rhs.stencilStoreOp ) &&
             ( initialLayout == rhs.initialLayout ) && ( finalLayout == rhs.finalLayout );
#  endif
    }

    bool operator!=( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members mirror VkAttachmentDescription2 exactly (order and types) — do not reorder
    StructureType              sType          = StructureType::eAttachmentDescription2;
    const void *               pNext          = {};
    AttachmentDescriptionFlags flags          = {};
    Format                     format         = Format::eUndefined;
    SampleCountFlagBits        samples        = SampleCountFlagBits::e1;
    AttachmentLoadOp           loadOp         = AttachmentLoadOp::eLoad;
    AttachmentStoreOp          storeOp        = AttachmentStoreOp::eStore;
    AttachmentLoadOp           stencilLoadOp  = AttachmentLoadOp::eLoad;
    AttachmentStoreOp          stencilStoreOp = AttachmentStoreOp::eStore;
    ImageLayout                initialLayout  = ImageLayout::eUndefined;
    ImageLayout                finalLayout    = ImageLayout::eUndefined;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type back to its C++ wrapper (used by generic code, C++20 builds only)
  template <>
  struct CppType<VkAttachmentDescription2>
  {
    using Type = AttachmentDescription2;
  };
#endif

  // maps the sType enumerant to the wrapper type, e.g. for pNext-chain validation
  template <>
  struct CppType<StructureType, StructureType::eAttachmentDescription2>
  {
    using Type = AttachmentDescription2;
  };

  // alias for the KHR extension name, kept since the struct was promoted to core
  using AttachmentDescription2KHR = AttachmentDescription2;

  // wrapper struct for struct VkAttachmentDescriptionStencilLayout, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentDescriptionStencilLayout.html
  struct AttachmentDescriptionStencilLayout
  {
    using NativeType = VkAttachmentDescriptionStencilLayout;

    // pNext-chain metadata: this struct may appear at most once in a chain, and its sType is fixed
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAttachmentDescriptionStencilLayout;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is set by its member initializer; only pNext and the payload members are taken here
    VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( ImageLayout stencilInitialLayout_ = ImageLayout::eUndefined,
                                                             ImageLayout stencilFinalLayout_   = ImageLayout::eUndefined,
                                                             void *      pNext_                = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stencilInitialLayout{ stencilInitialLayout_ }
      , stencilFinalLayout{ stencilFinalLayout_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct; relies on the wrapper being layout-identical to VkAttachmentDescriptionStencilLayout
    AttachmentDescriptionStencilLayout( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
      : AttachmentDescriptionStencilLayout( *reinterpret_cast<AttachmentDescriptionStencilLayout const *>( &rhs ) )
    {
    }

    AttachmentDescriptionStencilLayout & operator=( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct (same layout-compatibility requirement as the converting constructor)
    AttachmentDescriptionStencilLayout & operator=( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AttachmentDescriptionStencilLayout const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & setStencilInitialLayout( ImageLayout stencilInitialLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilInitialLayout = stencilInitialLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & setStencilFinalLayout( ImageLayout stencilFinalLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilFinalLayout = stencilFinalLayout_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native C struct, by reference and by pointer
    operator VkAttachmentDescriptionStencilLayout const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAttachmentDescriptionStencilLayout *>( this );
    }

    operator VkAttachmentDescriptionStencilLayout &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAttachmentDescriptionStencilLayout *>( this );
    }

    operator VkAttachmentDescriptionStencilLayout const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAttachmentDescriptionStencilLayout *>( this );
    }

    operator VkAttachmentDescriptionStencilLayout *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAttachmentDescriptionStencilLayout *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references over all members, used by the reflection-based operator==
    std::tuple<StructureType const &, void * const &, ImageLayout const &, ImageLayout const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stencilInitialLayout, stencilFinalLayout );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AttachmentDescriptionStencilLayout const & ) const = default;
#else
    // without <=>, fall back to member-wise equality (pNext is compared by pointer value only)
    bool operator==( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilInitialLayout == rhs.stencilInitialLayout ) &&
             ( stencilFinalLayout == rhs.stencilFinalLayout );
#  endif
    }

    bool operator!=( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members mirror VkAttachmentDescriptionStencilLayout exactly (order and types) — do not reorder
    StructureType sType                = StructureType::eAttachmentDescriptionStencilLayout;
    void *        pNext                = {};
    ImageLayout   stencilInitialLayout = ImageLayout::eUndefined;
    ImageLayout   stencilFinalLayout   = ImageLayout::eUndefined;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkAttachmentDescriptionStencilLayout>
  {
    using Type = AttachmentDescriptionStencilLayout;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eAttachmentDescriptionStencilLayout>
  {
    using Type = AttachmentDescriptionStencilLayout;
  };

  using AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout;

  // wrapper struct for struct VkAttachmentFeedbackLoopInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentFeedbackLoopInfoEXT.html
  struct AttachmentFeedbackLoopInfoEXT
  {
    using NativeType = VkAttachmentFeedbackLoopInfoEXT;

    // allowDuplicate == false: at most one instance of this struct may appear in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAttachmentFeedbackLoopInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: feedbackLoopEnable defaults to VK_FALSE (zero-initialized Bool32).
    VULKAN_HPP_CONSTEXPR AttachmentFeedbackLoopInfoEXT( Bool32 feedbackLoopEnable_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , feedbackLoopEnable{ feedbackLoopEnable_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AttachmentFeedbackLoopInfoEXT( AttachmentFeedbackLoopInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; valid because the wrapper is layout-compatible.
    AttachmentFeedbackLoopInfoEXT( VkAttachmentFeedbackLoopInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : AttachmentFeedbackLoopInfoEXT( *reinterpret_cast<AttachmentFeedbackLoopInfoEXT const *>( &rhs ) )
    {
    }

    AttachmentFeedbackLoopInfoEXT & operator=( AttachmentFeedbackLoopInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct via the layout-compatible wrapper view.
    AttachmentFeedbackLoopInfoEXT & operator=( VkAttachmentFeedbackLoopInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AttachmentFeedbackLoopInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (fluent) setters; sType deliberately has no setter.
    VULKAN_HPP_CONSTEXPR_14 AttachmentFeedbackLoopInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentFeedbackLoopInfoEXT & setFeedbackLoopEnable( Bool32 feedbackLoopEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      feedbackLoopEnable = feedbackLoopEnable_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type (reference and pointer forms).
    operator VkAttachmentFeedbackLoopInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAttachmentFeedbackLoopInfoEXT *>( this );
    }

    operator VkAttachmentFeedbackLoopInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAttachmentFeedbackLoopInfoEXT *>( this );
    }

    operator VkAttachmentFeedbackLoopInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAttachmentFeedbackLoopInfoEXT *>( this );
    }

    operator VkAttachmentFeedbackLoopInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAttachmentFeedbackLoopInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, enabling reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, feedbackLoopEnable );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison compares all members, including sType and pNext.
    auto operator<=>( AttachmentFeedbackLoopInfoEXT const & ) const = default;
#else
    bool operator==( AttachmentFeedbackLoopInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      // pNext is compared as a raw pointer; chained structures are not deep-compared.
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( feedbackLoopEnable == rhs.feedbackLoopEnable );
#  endif
    }

    bool operator!=( AttachmentFeedbackLoopInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType identifies this struct in a pNext chain and must keep its initial value.
    StructureType sType              = StructureType::eAttachmentFeedbackLoopInfoEXT;
    const void *  pNext              = {};
    Bool32        feedbackLoopEnable = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Reverse mapping from the native Vulkan type to this C++ wrapper (available with C++20).
  template <>
  struct CppType<VkAttachmentFeedbackLoopInfoEXT>
  {
    using Type = AttachmentFeedbackLoopInfoEXT;
  };
#endif

  // Mapping from the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eAttachmentFeedbackLoopInfoEXT>
  {
    using Type = AttachmentFeedbackLoopInfoEXT;
  };

  // wrapper struct for struct VkAttachmentReference, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentReference.html
  struct AttachmentReference
  {
    using NativeType = VkAttachmentReference;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: attachment index and the layout the attachment uses during the subpass.
    VULKAN_HPP_CONSTEXPR AttachmentReference( uint32_t attachment_ = {}, ImageLayout layout_ = ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
      : attachment{ attachment_ }
      , layout{ layout_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AttachmentReference( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct by converting each member explicitly.
    AttachmentReference( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
      : attachment{ rhs.attachment }
      , layout{ static_cast<ImageLayout>( rhs.layout ) }
    {
    }

    AttachmentReference & operator=( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct, member by member.
    AttachmentReference & operator=( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      attachment = rhs.attachment;
      layout     = static_cast<ImageLayout>( rhs.layout );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (fluent) setters.
    VULKAN_HPP_CONSTEXPR_14 AttachmentReference & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT
    {
      attachment = attachment_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentReference & setLayout( ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
    {
      layout = layout_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type; the wrapper is layout-identical to
    // VkAttachmentReference, so reinterpreting this object is valid.
    operator VkAttachmentReference const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAttachmentReference *>( this );
    }

    operator VkAttachmentReference &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAttachmentReference *>( this );
    }

    operator VkAttachmentReference const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAttachmentReference *>( this );
    }

    operator VkAttachmentReference *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAttachmentReference *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, enabling reflection-based comparison.
    std::tuple<uint32_t const &, ImageLayout const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( attachment, layout );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison covers ==, !=, and the orderings.
    auto operator<=>( AttachmentReference const & ) const = default;
#else
    bool operator==( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return attachment == rhs.attachment && layout == rhs.layout;
#  endif
    }

    bool operator!=( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    uint32_t    attachment = {};
    ImageLayout layout     = ImageLayout::eUndefined;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Reverse mapping from the native Vulkan type to this C++ wrapper (available with C++20).
  template <>
  struct CppType<VkAttachmentReference>
  {
    using Type = AttachmentReference;
  };
#endif

  // wrapper struct for struct VkAttachmentReference2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentReference2.html
  struct AttachmentReference2
  {
    using NativeType = VkAttachmentReference2;

    // allowDuplicate == false: at most one instance of this struct may appear in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAttachmentReference2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; aspectMask extends AttachmentReference with an explicit image-aspect selection.
    VULKAN_HPP_CONSTEXPR AttachmentReference2( uint32_t         attachment_ = {},
                                               ImageLayout      layout_     = ImageLayout::eUndefined,
                                               ImageAspectFlags aspectMask_ = {},
                                               const void *     pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , attachment{ attachment_ }
      , layout{ layout_ }
      , aspectMask{ aspectMask_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AttachmentReference2( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; valid because the wrapper is layout-compatible.
    AttachmentReference2( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : AttachmentReference2( *reinterpret_cast<AttachmentReference2 const *>( &rhs ) )
    {
    }

    AttachmentReference2 & operator=( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct via the layout-compatible wrapper view.
    AttachmentReference2 & operator=( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AttachmentReference2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (fluent) setters; sType deliberately has no setter.
    VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT
    {
      attachment = attachment_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setLayout( ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
    {
      layout = layout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setAspectMask( ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
    {
      aspectMask = aspectMask_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type (reference and pointer forms).
    operator VkAttachmentReference2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAttachmentReference2 *>( this );
    }

    operator VkAttachmentReference2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAttachmentReference2 *>( this );
    }

    operator VkAttachmentReference2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAttachmentReference2 *>( this );
    }

    operator VkAttachmentReference2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAttachmentReference2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, enabling reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, ImageLayout const &, ImageAspectFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, attachment, layout, aspectMask );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison compares all members, including sType and pNext.
    auto operator<=>( AttachmentReference2 const & ) const = default;
#else
    bool operator==( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      // pNext is compared as a raw pointer; chained structures are not deep-compared.
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachment == rhs.attachment ) && ( layout == rhs.layout ) &&
             ( aspectMask == rhs.aspectMask );
#  endif
    }

    bool operator!=( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType identifies this struct in a pNext chain and must keep its initial value.
    StructureType    sType      = StructureType::eAttachmentReference2;
    const void *     pNext      = {};
    uint32_t         attachment = {};
    ImageLayout      layout     = ImageLayout::eUndefined;
    ImageAspectFlags aspectMask = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Reverse mapping from the native Vulkan type to this C++ wrapper (available with C++20).
  template <>
  struct CppType<VkAttachmentReference2>
  {
    using Type = AttachmentReference2;
  };
#endif

  // Mapping from the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eAttachmentReference2>
  {
    using Type = AttachmentReference2;
  };

  // KHR-suffixed alias retained for backward compatibility.
  using AttachmentReference2KHR = AttachmentReference2;

  // wrapper struct for struct VkAttachmentReferenceStencilLayout, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentReferenceStencilLayout.html
  struct AttachmentReferenceStencilLayout
  {
    using NativeType = VkAttachmentReferenceStencilLayout;

    // allowDuplicate == false: at most one instance of this struct may appear in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAttachmentReferenceStencilLayout;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: stencilLayout defaults to eUndefined; pNext extends the structure chain.
    VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( ImageLayout stencilLayout_ = ImageLayout::eUndefined, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stencilLayout{ stencilLayout_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; valid because the wrapper is layout-compatible.
    AttachmentReferenceStencilLayout( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
      : AttachmentReferenceStencilLayout( *reinterpret_cast<AttachmentReferenceStencilLayout const *>( &rhs ) )
    {
    }

    AttachmentReferenceStencilLayout & operator=( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct via the layout-compatible wrapper view.
    AttachmentReferenceStencilLayout & operator=( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AttachmentReferenceStencilLayout const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (fluent) setters; sType deliberately has no setter.
    VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout & setStencilLayout( ImageLayout stencilLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilLayout = stencilLayout_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type (reference and pointer forms).
    operator VkAttachmentReferenceStencilLayout const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAttachmentReferenceStencilLayout *>( this );
    }

    operator VkAttachmentReferenceStencilLayout &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAttachmentReferenceStencilLayout *>( this );
    }

    operator VkAttachmentReferenceStencilLayout const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAttachmentReferenceStencilLayout *>( this );
    }

    operator VkAttachmentReferenceStencilLayout *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAttachmentReferenceStencilLayout *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, enabling reflection-based comparison.
    std::tuple<StructureType const &, void * const &, ImageLayout const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stencilLayout );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison compares all members, including sType and pNext.
    auto operator<=>( AttachmentReferenceStencilLayout const & ) const = default;
#else
    bool operator==( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      // pNext is compared as a raw pointer; chained structures are not deep-compared.
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilLayout == rhs.stencilLayout );
#  endif
    }

    bool operator!=( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType identifies this struct in a pNext chain and must keep its initial value.
    StructureType sType         = StructureType::eAttachmentReferenceStencilLayout;
    void *        pNext         = {};
    ImageLayout   stencilLayout = ImageLayout::eUndefined;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Reverse mapping from the native Vulkan type to this C++ wrapper (available with C++20).
  template <>
  struct CppType<VkAttachmentReferenceStencilLayout>
  {
    using Type = AttachmentReferenceStencilLayout;
  };
#endif

  // Mapping from the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eAttachmentReferenceStencilLayout>
  {
    using Type = AttachmentReferenceStencilLayout;
  };

  // KHR-suffixed alias retained for backward compatibility.
  using AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout;

  // wrapper struct for struct VkAttachmentSampleCountInfoAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentSampleCountInfoAMD.html
  struct AttachmentSampleCountInfoAMD
  {
    using NativeType = VkAttachmentSampleCountInfoAMD;

    // allowDuplicate == false: at most one instance of this struct may appear in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eAttachmentSampleCountInfoAMD;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: pColorAttachmentSamples points to colorAttachmentCount_ entries
    // (caller keeps the pointed-to array alive; this struct does not own it).
    VULKAN_HPP_CONSTEXPR AttachmentSampleCountInfoAMD( uint32_t                    colorAttachmentCount_          = {},
                                                       const SampleCountFlagBits * pColorAttachmentSamples_       = {},
                                                       SampleCountFlagBits         depthStencilAttachmentSamples_ = SampleCountFlagBits::e1,
                                                       const void *                pNext_                         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , colorAttachmentCount{ colorAttachmentCount_ }
      , pColorAttachmentSamples{ pColorAttachmentSamples_ }
      , depthStencilAttachmentSamples{ depthStencilAttachmentSamples_ }
    {
    }

    VULKAN_HPP_CONSTEXPR AttachmentSampleCountInfoAMD( AttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; valid because the wrapper is layout-compatible.
    AttachmentSampleCountInfoAMD( VkAttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : AttachmentSampleCountInfoAMD( *reinterpret_cast<AttachmentSampleCountInfoAMD const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives count and pointer from the array proxy.
    // The proxy must not refer to a temporary, as only the pointer is stored.
    AttachmentSampleCountInfoAMD( ArrayProxyNoTemporaries<const SampleCountFlagBits> const & colorAttachmentSamples_,
                                  SampleCountFlagBits                                        depthStencilAttachmentSamples_ = SampleCountFlagBits::e1,
                                  const void *                                               pNext_                         = nullptr )
      : pNext( pNext_ )
      , colorAttachmentCount( static_cast<uint32_t>( colorAttachmentSamples_.size() ) )
      , pColorAttachmentSamples( colorAttachmentSamples_.data() )
      , depthStencilAttachmentSamples( depthStencilAttachmentSamples_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    AttachmentSampleCountInfoAMD & operator=( AttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct via the layout-compatible wrapper view.
    AttachmentSampleCountInfoAMD & operator=( VkAttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AttachmentSampleCountInfoAMD const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (fluent) setters; sType deliberately has no setter.
    VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount = colorAttachmentCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD &
      setPColorAttachmentSamples( const SampleCountFlagBits * pColorAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT
    {
      pColorAttachmentSamples = pColorAttachmentSamples_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: keeps colorAttachmentCount and pColorAttachmentSamples consistent
    // by deriving both from the array proxy.
    AttachmentSampleCountInfoAMD &
      setColorAttachmentSamples( ArrayProxyNoTemporaries<const SampleCountFlagBits> const & colorAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount    = static_cast<uint32_t>( colorAttachmentSamples_.size() );
      pColorAttachmentSamples = colorAttachmentSamples_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD &
      setDepthStencilAttachmentSamples( SampleCountFlagBits depthStencilAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT
    {
      depthStencilAttachmentSamples = depthStencilAttachmentSamples_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type (reference and pointer forms).
    operator VkAttachmentSampleCountInfoAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAttachmentSampleCountInfoAMD *>( this );
    }

    operator VkAttachmentSampleCountInfoAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAttachmentSampleCountInfoAMD *>( this );
    }

    operator VkAttachmentSampleCountInfoAMD const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAttachmentSampleCountInfoAMD *>( this );
    }

    operator VkAttachmentSampleCountInfoAMD *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAttachmentSampleCountInfoAMD *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, enabling reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const SampleCountFlagBits * const &, SampleCountFlagBits const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, colorAttachmentCount, pColorAttachmentSamples, depthStencilAttachmentSamples );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison compares all members, including the raw pointers.
    auto operator<=>( AttachmentSampleCountInfoAMD const & ) const = default;
#else
    bool operator==( AttachmentSampleCountInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      // pNext and pColorAttachmentSamples are compared as raw pointers; array contents are not compared.
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) &&
             ( pColorAttachmentSamples == rhs.pColorAttachmentSamples ) && ( depthStencilAttachmentSamples == rhs.depthStencilAttachmentSamples );
#  endif
    }

    bool operator!=( AttachmentSampleCountInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType identifies this struct in a pNext chain and must keep its initial value.
    StructureType               sType                         = StructureType::eAttachmentSampleCountInfoAMD;
    const void *                pNext                         = {};
    uint32_t                    colorAttachmentCount          = {};
    const SampleCountFlagBits * pColorAttachmentSamples       = {};
    SampleCountFlagBits         depthStencilAttachmentSamples = SampleCountFlagBits::e1;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Reverse mapping from the native Vulkan type to this C++ wrapper (available with C++20).
  template <>
  struct CppType<VkAttachmentSampleCountInfoAMD>
  {
    using Type = AttachmentSampleCountInfoAMD;
  };
#endif

  // Mapping from the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eAttachmentSampleCountInfoAMD>
  {
    using Type = AttachmentSampleCountInfoAMD;
  };

  // NV-suffixed alias retained for backward compatibility.
  using AttachmentSampleCountInfoNV = AttachmentSampleCountInfoAMD;

  // wrapper struct for struct VkExtent2D, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExtent2D.html
  struct Extent2D
  {
    using NativeType = VkExtent2D;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: both dimensions default to zero.
    VULKAN_HPP_CONSTEXPR Extent2D( uint32_t width_ = {}, uint32_t height_ = {} ) VULKAN_HPP_NOEXCEPT
      : width{ width_ }
      , height{ height_ }
    {
    }

    VULKAN_HPP_CONSTEXPR Extent2D( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct by copying each member.
    Extent2D( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT
      : width{ rhs.width }
      , height{ rhs.height }
    {
    }

    Extent2D & operator=( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct, member by member.
    Extent2D & operator=( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      width  = rhs.width;
      height = rhs.height;
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (fluent) setters.
    VULKAN_HPP_CONSTEXPR_14 Extent2D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
    {
      width = width_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Extent2D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
    {
      height = height_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type; the wrapper is layout-identical to
    // VkExtent2D, so reinterpreting this object is valid.
    operator VkExtent2D const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExtent2D *>( this );
    }

    operator VkExtent2D &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExtent2D *>( this );
    }

    operator VkExtent2D const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExtent2D *>( this );
    }

    operator VkExtent2D *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExtent2D *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, enabling reflection-based comparison.
    std::tuple<uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( width, height );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison covers ==, !=, and the orderings.
    auto operator<=>( Extent2D const & ) const = default;
#else
    bool operator==( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return width == rhs.width && height == rhs.height;
#  endif
    }

    bool operator!=( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    uint32_t width  = {};
    uint32_t height = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Reverse mapping from the native Vulkan type to this C++ wrapper (available with C++20).
  template <>
  struct CppType<VkExtent2D>
  {
    using Type = Extent2D;
  };
#endif

  // wrapper struct for struct VkSampleLocationEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSampleLocationEXT.html
  struct SampleLocationEXT
  {
    using NativeType = VkSampleLocationEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: both coordinates default to zero.
    VULKAN_HPP_CONSTEXPR SampleLocationEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT
      : x{ x_ }
      , y{ y_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SampleLocationEXT( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct by copying each member.
    SampleLocationEXT( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : x{ rhs.x }
      , y{ rhs.y }
    {
    }

    SampleLocationEXT & operator=( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct, member by member.
    SampleLocationEXT & operator=( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      x = rhs.x;
      y = rhs.y;
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (fluent) setters.
    VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
    {
      x = x_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
    {
      y = y_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type; the wrapper is layout-identical to
    // VkSampleLocationEXT, so reinterpreting this object is valid.
    operator VkSampleLocationEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSampleLocationEXT *>( this );
    }

    operator VkSampleLocationEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSampleLocationEXT *>( this );
    }

    operator VkSampleLocationEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSampleLocationEXT *>( this );
    }

    operator VkSampleLocationEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSampleLocationEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, enabling reflection-based comparison.
    std::tuple<float const &, float const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( x, y );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison covers ==, !=, and the orderings.
    auto operator<=>( SampleLocationEXT const & ) const = default;
#else
    bool operator==( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return x == rhs.x && y == rhs.y;
#  endif
    }

    bool operator!=( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    float x = {};
    float y = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Reverse mapping from the native Vulkan type to this C++ wrapper (available with C++20).
  template <>
  struct CppType<VkSampleLocationEXT>
  {
    using Type = SampleLocationEXT;
  };
#endif

  // wrapper struct for struct VkSampleLocationsInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSampleLocationsInfoEXT.html
  struct SampleLocationsInfoEXT
  {
    using NativeType = VkSampleLocationsInfoEXT;

    // This struct may appear at most once in a pNext chain; its sType is fixed.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSampleLocationsInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor. sampleLocationsCount_ and pSampleLocations_ form
    // a count/pointer pair; the caller is responsible for keeping them in sync.
    VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( SampleCountFlagBits       sampleLocationsPerPixel_ = SampleCountFlagBits::e1,
                                                 Extent2D                  sampleLocationGridSize_  = {},
                                                 uint32_t                  sampleLocationsCount_    = {},
                                                 const SampleLocationEXT * pSampleLocations_        = {},
                                                 const void *              pNext_                   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , sampleLocationsPerPixel{ sampleLocationsPerPixel_ }
      , sampleLocationGridSize{ sampleLocationGridSize_ }
      , sampleLocationsCount{ sampleLocationsCount_ }
      , pSampleLocations{ pSampleLocations_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct: copies via reinterpret_cast, relying on
    // layout compatibility with VkSampleLocationsInfoEXT.
    SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SampleLocationsInfoEXT( *reinterpret_cast<SampleLocationsInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives sampleLocationsCount and pSampleLocations
    // from the array proxy. The proxied data is referenced, not copied, so it
    // must outlive this struct.
    SampleLocationsInfoEXT( SampleCountFlagBits                                      sampleLocationsPerPixel_,
                            Extent2D                                                 sampleLocationGridSize_,
                            ArrayProxyNoTemporaries<const SampleLocationEXT> const & sampleLocations_,
                            const void *                                             pNext_ = nullptr )
      : pNext( pNext_ )
      , sampleLocationsPerPixel( sampleLocationsPerPixel_ )
      , sampleLocationGridSize( sampleLocationGridSize_ )
      , sampleLocationsCount( static_cast<uint32_t>( sampleLocations_.size() ) )
      , pSampleLocations( sampleLocations_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SampleLocationsInfoEXT & operator=( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    SampleLocationsInfoEXT & operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SampleLocationsInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setSampleLocationsPerPixel( SampleCountFlagBits sampleLocationsPerPixel_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationsPerPixel = sampleLocationsPerPixel_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setSampleLocationGridSize( Extent2D const & sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationGridSize = sampleLocationGridSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setSampleLocationsCount( uint32_t sampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationsCount = sampleLocationsCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setPSampleLocations( const SampleLocationEXT * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
    {
      pSampleLocations = pSampleLocations_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both the count and the pointer from one array proxy, keeping the
    // count/pointer pair consistent. The proxied data must outlive this struct.
    SampleLocationsInfoEXT & setSampleLocations( ArrayProxyNoTemporaries<const SampleLocationEXT> const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationsCount = static_cast<uint32_t>( sampleLocations_.size() );
      pSampleLocations     = sampleLocations_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the layout-compatible C type.
    operator VkSampleLocationsInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSampleLocationsInfoEXT *>( this );
    }

    operator VkSampleLocationsInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSampleLocationsInfoEXT *>( this );
    }

    operator VkSampleLocationsInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSampleLocationsInfoEXT *>( this );
    }

    operator VkSampleLocationsInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSampleLocationsInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, SampleCountFlagBits const &, Extent2D const &, uint32_t const &, const SampleLocationEXT * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, sampleLocationsPerPixel, sampleLocationGridSize, sampleLocationsCount, pSampleLocations );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SampleLocationsInfoEXT const & ) const = default;
#else
    // Shallow equality: pNext and pSampleLocations are compared by address,
    // not by the data they point to.
    bool operator==( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel ) &&
             ( sampleLocationGridSize == rhs.sampleLocationGridSize ) && ( sampleLocationsCount == rhs.sampleLocationsCount ) &&
             ( pSampleLocations == rhs.pSampleLocations );
#  endif
    }

    bool operator!=( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType             sType                   = StructureType::eSampleLocationsInfoEXT;
    const void *              pNext                   = {};
    SampleCountFlagBits       sampleLocationsPerPixel = SampleCountFlagBits::e1;
    Extent2D                  sampleLocationGridSize  = {};
    uint32_t                  sampleLocationsCount    = {};
    const SampleLocationEXT * pSampleLocations        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkSampleLocationsInfoEXT to its C++ wrapper.
  template <>
  struct CppType<VkSampleLocationsInfoEXT>
  {
    using Type = SampleLocationsInfoEXT;
  };
#endif

  // Maps the sType enumerant back to the wrapper struct that carries it.
  template <>
  struct CppType<StructureType, StructureType::eSampleLocationsInfoEXT>
  {
    using Type = SampleLocationsInfoEXT;
  };

  // wrapper struct for struct VkAttachmentSampleLocationsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentSampleLocationsEXT.html
  struct AttachmentSampleLocationsEXT
  {
    using NativeType = VkAttachmentSampleLocationsEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Pairs an attachment index with the sample-locations info to apply to it.
    VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( uint32_t attachmentIndex_ = {}, SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT
      : attachmentIndex( attachmentIndex_ )
      , sampleLocationsInfo( sampleLocationsInfo_ )
    {
    }

    VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Copy-constructs from the layout-compatible C struct.
    AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : AttachmentSampleLocationsEXT( *reinterpret_cast<AttachmentSampleLocationsEXT const *>( &rhs ) )
    {
    }

    AttachmentSampleLocationsEXT & operator=( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assigns from the layout-compatible C struct.
    AttachmentSampleLocationsEXT & operator=( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<AttachmentSampleLocationsEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT & setAttachmentIndex( uint32_t attachmentIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      this->attachmentIndex = attachmentIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT & setSampleLocationsInfo( SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      this->sampleLocationsInfo = sampleLocationsInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the C API type, by reference and by pointer.
    operator VkAttachmentSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAttachmentSampleLocationsEXT *>( this );
    }

    operator VkAttachmentSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAttachmentSampleLocationsEXT *>( this );
    }

    operator VkAttachmentSampleLocationsEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkAttachmentSampleLocationsEXT *>( this );
    }

    operator VkAttachmentSampleLocationsEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkAttachmentSampleLocationsEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All data members as a tuple of const references.
    std::tuple<uint32_t const &, SampleLocationsInfoEXT const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( attachmentIndex, sampleLocationsInfo );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AttachmentSampleLocationsEXT const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( attachmentIndex == rhs.attachmentIndex ) && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
#  endif
    }

    bool operator!=( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    uint32_t               attachmentIndex     = {};
    SampleLocationsInfoEXT sampleLocationsInfo = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkAttachmentSampleLocationsEXT to its C++ wrapper.
  template <>
  struct CppType<VkAttachmentSampleLocationsEXT>
  {
    using Type = AttachmentSampleLocationsEXT;
  };
#endif

  // wrapper struct for struct VkBaseInStructure, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBaseInStructure.html
  // Generic read-only view of any Vulkan input struct: exposes just the common
  // sType / pNext prefix so a pNext chain can be walked without knowing the
  // concrete struct types.
  struct BaseInStructure
  {
    using NativeType = VkBaseInStructure;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // NOTE(review): the eApplicationInfo default is just a placeholder; callers
    // constructing a chain node are expected to pass the real sType.
    BaseInStructure( StructureType sType_ = StructureType::eApplicationInfo, const struct BaseInStructure * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : sType{ sType_ }
      , pNext{ pNext_ }
    {
    }

    BaseInStructure( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct via reinterpret_cast (layout-compatible).
    BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT : BaseInStructure( *reinterpret_cast<BaseInStructure const *>( &rhs ) ) {}

    BaseInStructure & operator=( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BaseInStructure & operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BaseInStructure const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Links the next struct in the pNext chain; returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 BaseInStructure & setPNext( const struct BaseInStructure * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the layout-compatible C type.
    operator VkBaseInStructure const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBaseInStructure *>( this );
    }

    operator VkBaseInStructure &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBaseInStructure *>( this );
    }

    operator VkBaseInStructure const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBaseInStructure *>( this );
    }

    operator VkBaseInStructure *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBaseInStructure *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const struct BaseInStructure * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BaseInStructure const & ) const = default;
#else
    // Shallow equality: pNext is compared by address only.
    bool operator==( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
#  endif
    }

    bool operator!=( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                  sType = StructureType::eApplicationInfo;
    const struct BaseInStructure * pNext = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkBaseInStructure to its C++ wrapper.
  template <>
  struct CppType<VkBaseInStructure>
  {
    using Type = BaseInStructure;
  };
#endif

  // wrapper struct for struct VkBaseOutStructure, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBaseOutStructure.html
  // Generic mutable view of any Vulkan output struct: exposes just the common
  // sType / pNext prefix, with a non-const pNext so chains can be modified.
  struct BaseOutStructure
  {
    using NativeType = VkBaseOutStructure;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // NOTE(review): the eApplicationInfo default is just a placeholder; callers
    // constructing a chain node are expected to pass the real sType.
    BaseOutStructure( StructureType sType_ = StructureType::eApplicationInfo, struct BaseOutStructure * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : sType{ sType_ }
      , pNext{ pNext_ }
    {
    }

    BaseOutStructure( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct via reinterpret_cast (layout-compatible).
    BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT : BaseOutStructure( *reinterpret_cast<BaseOutStructure const *>( &rhs ) ) {}

    BaseOutStructure & operator=( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BaseOutStructure & operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BaseOutStructure const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Links the next struct in the pNext chain; returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 BaseOutStructure & setPNext( struct BaseOutStructure * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the layout-compatible C type.
    operator VkBaseOutStructure const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBaseOutStructure *>( this );
    }

    operator VkBaseOutStructure &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBaseOutStructure *>( this );
    }

    operator VkBaseOutStructure const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBaseOutStructure *>( this );
    }

    operator VkBaseOutStructure *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBaseOutStructure *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, struct BaseOutStructure * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BaseOutStructure const & ) const = default;
#else
    // Shallow equality: pNext is compared by address only.
    bool operator==( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
#  endif
    }

    bool operator!=( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType             sType = StructureType::eApplicationInfo;
    struct BaseOutStructure * pNext = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkBaseOutStructure to its C++ wrapper.
  template <>
  struct CppType<VkBaseOutStructure>
  {
    using Type = BaseOutStructure;
  };
#endif

  // wrapper struct for struct VkBeginCustomResolveInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBeginCustomResolveInfoEXT.html
  struct BeginCustomResolveInfoEXT
  {
    using NativeType = VkBeginCustomResolveInfoEXT;

    // Fixed sType; at most one instance of this struct per pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBeginCustomResolveInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // This struct carries no payload beyond its sType / pNext chain entry.
    VULKAN_HPP_CONSTEXPR BeginCustomResolveInfoEXT( void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext( pNext_ ) {}

    VULKAN_HPP_CONSTEXPR BeginCustomResolveInfoEXT( BeginCustomResolveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Copy-constructs from the layout-compatible C struct.
    BeginCustomResolveInfoEXT( VkBeginCustomResolveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : BeginCustomResolveInfoEXT( *reinterpret_cast<BeginCustomResolveInfoEXT const *>( &rhs ) )
    {
    }

    BeginCustomResolveInfoEXT & operator=( BeginCustomResolveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assigns from the layout-compatible C struct.
    BeginCustomResolveInfoEXT & operator=( VkBeginCustomResolveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BeginCustomResolveInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Links the next struct in the pNext chain; returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 BeginCustomResolveInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the C API type, by reference and by pointer.
    operator VkBeginCustomResolveInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBeginCustomResolveInfoEXT *>( this );
    }

    operator VkBeginCustomResolveInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBeginCustomResolveInfoEXT *>( this );
    }

    operator VkBeginCustomResolveInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBeginCustomResolveInfoEXT *>( this );
    }

    operator VkBeginCustomResolveInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBeginCustomResolveInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All data members as a tuple of const references.
    std::tuple<StructureType const &, void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BeginCustomResolveInfoEXT const & ) const = default;
#else
    // Shallow equality: pNext is compared by address only.
    bool operator==( BeginCustomResolveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
#  endif
    }

    bool operator!=( BeginCustomResolveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType = StructureType::eBeginCustomResolveInfoEXT;
    void *        pNext = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkBeginCustomResolveInfoEXT to its C++ wrapper.
  template <>
  struct CppType<VkBeginCustomResolveInfoEXT>
  {
    using Type = BeginCustomResolveInfoEXT;
  };
#endif

  // Maps the sType enumerant back to the wrapper struct that carries it.
  template <>
  struct CppType<StructureType, StructureType::eBeginCustomResolveInfoEXT>
  {
    using Type = BeginCustomResolveInfoEXT;
  };

  // wrapper struct for struct VkBindAccelerationStructureMemoryInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindAccelerationStructureMemoryInfoNV.html
  struct BindAccelerationStructureMemoryInfoNV
  {
    using NativeType = VkBindAccelerationStructureMemoryInfoNV;

    // This struct may appear at most once in a pNext chain; its sType is fixed.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBindAccelerationStructureMemoryInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor. deviceIndexCount_ and pDeviceIndices_ form a
    // count/pointer pair; the caller is responsible for keeping them in sync.
    VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( AccelerationStructureNV accelerationStructure_ = {},
                                                                DeviceMemory            memory_                = {},
                                                                DeviceSize              memoryOffset_          = {},
                                                                uint32_t                deviceIndexCount_      = {},
                                                                const uint32_t *        pDeviceIndices_        = {},
                                                                const void *            pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , accelerationStructure{ accelerationStructure_ }
      , memory{ memory_ }
      , memoryOffset{ memoryOffset_ }
      , deviceIndexCount{ deviceIndexCount_ }
      , pDeviceIndices{ pDeviceIndices_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct via reinterpret_cast (layout-compatible).
    BindAccelerationStructureMemoryInfoNV( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindAccelerationStructureMemoryInfoNV( *reinterpret_cast<BindAccelerationStructureMemoryInfoNV const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives deviceIndexCount and pDeviceIndices from
    // the array proxy. The proxied data is referenced, not copied, so it must
    // outlive this struct.
    BindAccelerationStructureMemoryInfoNV( AccelerationStructureNV                         accelerationStructure_,
                                           DeviceMemory                                    memory_,
                                           DeviceSize                                      memoryOffset_,
                                           ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_,
                                           const void *                                    pNext_ = nullptr )
      : pNext( pNext_ )
      , accelerationStructure( accelerationStructure_ )
      , memory( memory_ )
      , memoryOffset( memoryOffset_ )
      , deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) )
      , pDeviceIndices( deviceIndices_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    BindAccelerationStructureMemoryInfoNV & operator=( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BindAccelerationStructureMemoryInfoNV & operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BindAccelerationStructureMemoryInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV &
      setAccelerationStructure( AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructure = accelerationStructure_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setMemory( DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setMemoryOffset( DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryOffset = memoryOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceIndexCount = deviceIndexCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pDeviceIndices = pDeviceIndices_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both the count and the pointer from one array proxy, keeping the
    // count/pointer pair consistent. The proxied data must outlive this struct.
    BindAccelerationStructureMemoryInfoNV & setDeviceIndices( ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
      pDeviceIndices   = deviceIndices_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the layout-compatible C type.
    operator VkBindAccelerationStructureMemoryInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( this );
    }

    operator VkBindAccelerationStructureMemoryInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindAccelerationStructureMemoryInfoNV *>( this );
    }

    operator VkBindAccelerationStructureMemoryInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( this );
    }

    operator VkBindAccelerationStructureMemoryInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBindAccelerationStructureMemoryInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &,
               const void * const &,
               AccelerationStructureNV const &,
               DeviceMemory const &,
               DeviceSize const &,
               uint32_t const &,
               const uint32_t * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, accelerationStructure, memory, memoryOffset, deviceIndexCount, pDeviceIndices );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindAccelerationStructureMemoryInfoNV const & ) const = default;
#else
    // Shallow equality: pNext and pDeviceIndices are compared by address,
    // not by the data they point to.
    bool operator==( BindAccelerationStructureMemoryInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure ) && ( memory == rhs.memory ) &&
             ( memoryOffset == rhs.memoryOffset ) && ( deviceIndexCount == rhs.deviceIndexCount ) && ( pDeviceIndices == rhs.pDeviceIndices );
#  endif
    }

    bool operator!=( BindAccelerationStructureMemoryInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType           sType                 = StructureType::eBindAccelerationStructureMemoryInfoNV;
    const void *            pNext                 = {};
    AccelerationStructureNV accelerationStructure = {};
    DeviceMemory            memory                = {};
    DeviceSize              memoryOffset          = {};
    uint32_t                deviceIndexCount      = {};
    const uint32_t *        pDeviceIndices        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkBindAccelerationStructureMemoryInfoNV to its C++ wrapper.
  template <>
  struct CppType<VkBindAccelerationStructureMemoryInfoNV>
  {
    using Type = BindAccelerationStructureMemoryInfoNV;
  };
#endif

  // Maps the sType enumerant back to the wrapper struct that carries it.
  template <>
  struct CppType<StructureType, StructureType::eBindAccelerationStructureMemoryInfoNV>
  {
    using Type = BindAccelerationStructureMemoryInfoNV;
  };

  // wrapper struct for struct VkBindBufferMemoryDeviceGroupInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindBufferMemoryDeviceGroupInfo.html
  struct BindBufferMemoryDeviceGroupInfo
  {
    using NativeType = VkBindBufferMemoryDeviceGroupInfo;

    // This struct may appear at most once in a pNext chain; its sType is fixed.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBindBufferMemoryDeviceGroupInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor. deviceIndexCount_ and pDeviceIndices_ form a
    // count/pointer pair; the caller is responsible for keeping them in sync.
    VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( uint32_t         deviceIndexCount_ = {},
                                                          const uint32_t * pDeviceIndices_   = {},
                                                          const void *     pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , deviceIndexCount{ deviceIndexCount_ }
      , pDeviceIndices{ pDeviceIndices_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct via reinterpret_cast (layout-compatible).
    BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindBufferMemoryDeviceGroupInfo( *reinterpret_cast<BindBufferMemoryDeviceGroupInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives deviceIndexCount and pDeviceIndices from
    // the array proxy. The proxied data is referenced, not copied, so it must
    // outlive this struct.
    BindBufferMemoryDeviceGroupInfo( ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    BindBufferMemoryDeviceGroupInfo & operator=( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BindBufferMemoryDeviceGroupInfo & operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BindBufferMemoryDeviceGroupInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceIndexCount = deviceIndexCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pDeviceIndices = pDeviceIndices_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both the count and the pointer from one array proxy, keeping the
    // count/pointer pair consistent. The proxied data must outlive this struct.
    BindBufferMemoryDeviceGroupInfo & setDeviceIndices( ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
      pDeviceIndices   = deviceIndices_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the layout-compatible C type.
    operator VkBindBufferMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindBufferMemoryDeviceGroupInfo *>( this );
    }

    operator VkBindBufferMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindBufferMemoryDeviceGroupInfo *>( this );
    }

    operator VkBindBufferMemoryDeviceGroupInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBindBufferMemoryDeviceGroupInfo *>( this );
    }

    operator VkBindBufferMemoryDeviceGroupInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBindBufferMemoryDeviceGroupInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, deviceIndexCount, pDeviceIndices );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindBufferMemoryDeviceGroupInfo const & ) const = default;
#else
    // Shallow equality: pNext and pDeviceIndices are compared by address,
    // not by the data they point to.
    bool operator==( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndexCount == rhs.deviceIndexCount ) && ( pDeviceIndices == rhs.pDeviceIndices );
#  endif
    }

    bool operator!=( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType    sType            = StructureType::eBindBufferMemoryDeviceGroupInfo;
    const void *     pNext            = {};
    uint32_t         deviceIndexCount = {};
    const uint32_t * pDeviceIndices   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper for generic code (C++20 and up).
  template <>
  struct CppType<VkBindBufferMemoryDeviceGroupInfo>
  {
    using Type = BindBufferMemoryDeviceGroupInfo;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type (e.g. for structure-chain handling).
  template <>
  struct CppType<StructureType, StructureType::eBindBufferMemoryDeviceGroupInfo>
  {
    using Type = BindBufferMemoryDeviceGroupInfo;
  };

  // Backward-compatible alias for the original extension (KHR) name of this struct.
  using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo;

  // wrapper struct for struct VkBindBufferMemoryInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindBufferMemoryInfo.html
  struct BindBufferMemoryInfo
  {
    using NativeType = VkBindBufferMemoryInfo;

    // allowDuplicate == false: at most one instance of this struct may appear in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBindBufferMemoryInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all members default to zero-initialized / nullptr values.
    VULKAN_HPP_CONSTEXPR
      BindBufferMemoryInfo( Buffer buffer_ = {}, DeviceMemory memory_ = {}, DeviceSize memoryOffset_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , buffer{ buffer_ }
      , memory{ memory_ }
      , memoryOffset{ memoryOffset_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility with VkBindBufferMemoryInfo.
    BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindBufferMemoryInfo( *reinterpret_cast<BindBufferMemoryInfo const *>( &rhs ) )
    {
    }

    BindBufferMemoryInfo & operator=( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; relies on layout compatibility.
    BindBufferMemoryInfo & operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BindBufferMemoryInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (builder-style) setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setBuffer( Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setMemory( DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setMemoryOffset( DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryOffset = memoryOffset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C API type (reference and pointer forms).
    operator VkBindBufferMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindBufferMemoryInfo *>( this );
    }

    operator VkBindBufferMemoryInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindBufferMemoryInfo *>( this );
    }

    operator VkBindBufferMemoryInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBindBufferMemoryInfo *>( this );
    }

    operator VkBindBufferMemoryInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBindBufferMemoryInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to every member, used for reflection-based comparison below.
    std::tuple<StructureType const &, const void * const &, Buffer const &, DeviceMemory const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, buffer, memory, memoryOffset );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindBufferMemoryInfo const & ) const = default;
#else
    // Member-wise equality (handles compared by value).
    bool operator==( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset );
#  endif
    }

    bool operator!=( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType        = StructureType::eBindBufferMemoryInfo;
    const void *  pNext        = {};
    Buffer        buffer       = {};
    DeviceMemory  memory       = {};
    DeviceSize    memoryOffset = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper for generic code (C++20 and up).
  template <>
  struct CppType<VkBindBufferMemoryInfo>
  {
    using Type = BindBufferMemoryInfo;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type (e.g. for structure-chain handling).
  template <>
  struct CppType<StructureType, StructureType::eBindBufferMemoryInfo>
  {
    using Type = BindBufferMemoryInfo;
  };

  // Backward-compatible alias for the original extension (KHR) name of this struct.
  using BindBufferMemoryInfoKHR = BindBufferMemoryInfo;

  // wrapper struct for struct VkBindDataGraphPipelineSessionMemoryInfoARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindDataGraphPipelineSessionMemoryInfoARM.html
  struct BindDataGraphPipelineSessionMemoryInfoARM
  {
    using NativeType = VkBindDataGraphPipelineSessionMemoryInfoARM;

    // allowDuplicate == false: at most one instance of this struct may appear in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBindDataGraphPipelineSessionMemoryInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; bindPoint defaults to eTransient, everything else zero/null.
    VULKAN_HPP_CONSTEXPR
      BindDataGraphPipelineSessionMemoryInfoARM( DataGraphPipelineSessionARM          session_      = {},
                                                 DataGraphPipelineSessionBindPointARM bindPoint_    = DataGraphPipelineSessionBindPointARM::eTransient,
                                                 uint32_t                             objectIndex_  = {},
                                                 DeviceMemory                         memory_       = {},
                                                 DeviceSize                           memoryOffset_ = {},
                                                 const void *                         pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , session{ session_ }
      , bindPoint{ bindPoint_ }
      , objectIndex{ objectIndex_ }
      , memory{ memory_ }
      , memoryOffset{ memoryOffset_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BindDataGraphPipelineSessionMemoryInfoARM( BindDataGraphPipelineSessionMemoryInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility.
    BindDataGraphPipelineSessionMemoryInfoARM( VkBindDataGraphPipelineSessionMemoryInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindDataGraphPipelineSessionMemoryInfoARM( *reinterpret_cast<BindDataGraphPipelineSessionMemoryInfoARM const *>( &rhs ) )
    {
    }

    BindDataGraphPipelineSessionMemoryInfoARM & operator=( BindDataGraphPipelineSessionMemoryInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; relies on layout compatibility.
    BindDataGraphPipelineSessionMemoryInfoARM & operator=( VkBindDataGraphPipelineSessionMemoryInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BindDataGraphPipelineSessionMemoryInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (builder-style) setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 BindDataGraphPipelineSessionMemoryInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindDataGraphPipelineSessionMemoryInfoARM & setSession( DataGraphPipelineSessionARM session_ ) VULKAN_HPP_NOEXCEPT
    {
      session = session_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindDataGraphPipelineSessionMemoryInfoARM & setBindPoint( DataGraphPipelineSessionBindPointARM bindPoint_ ) VULKAN_HPP_NOEXCEPT
    {
      bindPoint = bindPoint_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindDataGraphPipelineSessionMemoryInfoARM & setObjectIndex( uint32_t objectIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      objectIndex = objectIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindDataGraphPipelineSessionMemoryInfoARM & setMemory( DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindDataGraphPipelineSessionMemoryInfoARM & setMemoryOffset( DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryOffset = memoryOffset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C API type (reference and pointer forms).
    operator VkBindDataGraphPipelineSessionMemoryInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindDataGraphPipelineSessionMemoryInfoARM *>( this );
    }

    operator VkBindDataGraphPipelineSessionMemoryInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindDataGraphPipelineSessionMemoryInfoARM *>( this );
    }

    operator VkBindDataGraphPipelineSessionMemoryInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBindDataGraphPipelineSessionMemoryInfoARM *>( this );
    }

    operator VkBindDataGraphPipelineSessionMemoryInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBindDataGraphPipelineSessionMemoryInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to every member, used for reflection-based comparison below.
    std::tuple<StructureType const &,
               const void * const &,
               DataGraphPipelineSessionARM const &,
               DataGraphPipelineSessionBindPointARM const &,
               uint32_t const &,
               DeviceMemory const &,
               DeviceSize const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, session, bindPoint, objectIndex, memory, memoryOffset );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindDataGraphPipelineSessionMemoryInfoARM const & ) const = default;
#else
    // Member-wise equality (handles compared by value).
    bool operator==( BindDataGraphPipelineSessionMemoryInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( session == rhs.session ) && ( bindPoint == rhs.bindPoint ) &&
             ( objectIndex == rhs.objectIndex ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset );
#  endif
    }

    bool operator!=( BindDataGraphPipelineSessionMemoryInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                        sType        = StructureType::eBindDataGraphPipelineSessionMemoryInfoARM;
    const void *                         pNext        = {};
    DataGraphPipelineSessionARM          session      = {};
    DataGraphPipelineSessionBindPointARM bindPoint    = DataGraphPipelineSessionBindPointARM::eTransient;
    uint32_t                             objectIndex  = {};
    DeviceMemory                         memory       = {};
    DeviceSize                           memoryOffset = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper for generic code (C++20 and up).
  template <>
  struct CppType<VkBindDataGraphPipelineSessionMemoryInfoARM>
  {
    using Type = BindDataGraphPipelineSessionMemoryInfoARM;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type (e.g. for structure-chain handling).
  template <>
  struct CppType<StructureType, StructureType::eBindDataGraphPipelineSessionMemoryInfoARM>
  {
    using Type = BindDataGraphPipelineSessionMemoryInfoARM;
  };

  // wrapper struct for struct VkBindDescriptorBufferEmbeddedSamplersInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindDescriptorBufferEmbeddedSamplersInfoEXT.html
  struct BindDescriptorBufferEmbeddedSamplersInfoEXT
  {
    using NativeType = VkBindDescriptorBufferEmbeddedSamplersInfoEXT;

    // allowDuplicate == false: at most one instance of this struct may appear in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBindDescriptorBufferEmbeddedSamplersInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all members default to zero-initialized / nullptr values.
    VULKAN_HPP_CONSTEXPR BindDescriptorBufferEmbeddedSamplersInfoEXT( ShaderStageFlags stageFlags_ = {},
                                                                      PipelineLayout   layout_     = {},
                                                                      uint32_t         set_        = {},
                                                                      const void *     pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stageFlags{ stageFlags_ }
      , layout{ layout_ }
      , set{ set_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BindDescriptorBufferEmbeddedSamplersInfoEXT( BindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility.
    BindDescriptorBufferEmbeddedSamplersInfoEXT( VkBindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindDescriptorBufferEmbeddedSamplersInfoEXT( *reinterpret_cast<BindDescriptorBufferEmbeddedSamplersInfoEXT const *>( &rhs ) )
    {
    }

    BindDescriptorBufferEmbeddedSamplersInfoEXT & operator=( BindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; relies on layout compatibility.
    BindDescriptorBufferEmbeddedSamplersInfoEXT & operator=( VkBindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BindDescriptorBufferEmbeddedSamplersInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (builder-style) setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT & setStageFlags( ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      stageFlags = stageFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT & setLayout( PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
    {
      layout = layout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT
    {
      set = set_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C API type (reference and pointer forms).
    operator VkBindDescriptorBufferEmbeddedSamplersInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindDescriptorBufferEmbeddedSamplersInfoEXT *>( this );
    }

    operator VkBindDescriptorBufferEmbeddedSamplersInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindDescriptorBufferEmbeddedSamplersInfoEXT *>( this );
    }

    operator VkBindDescriptorBufferEmbeddedSamplersInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBindDescriptorBufferEmbeddedSamplersInfoEXT *>( this );
    }

    operator VkBindDescriptorBufferEmbeddedSamplersInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBindDescriptorBufferEmbeddedSamplersInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to every member, used for reflection-based comparison below.
    std::tuple<StructureType const &, const void * const &, ShaderStageFlags const &, PipelineLayout const &, uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stageFlags, layout, set );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindDescriptorBufferEmbeddedSamplersInfoEXT const & ) const = default;
#else
    // Member-wise equality (handles compared by value).
    bool operator==( BindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageFlags == rhs.stageFlags ) && ( layout == rhs.layout ) && ( set == rhs.set );
#  endif
    }

    bool operator!=( BindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType    sType      = StructureType::eBindDescriptorBufferEmbeddedSamplersInfoEXT;
    const void *     pNext      = {};
    ShaderStageFlags stageFlags = {};
    PipelineLayout   layout     = {};
    uint32_t         set        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper for generic code (C++20 and up).
  template <>
  struct CppType<VkBindDescriptorBufferEmbeddedSamplersInfoEXT>
  {
    using Type = BindDescriptorBufferEmbeddedSamplersInfoEXT;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type (e.g. for structure-chain handling).
  template <>
  struct CppType<StructureType, StructureType::eBindDescriptorBufferEmbeddedSamplersInfoEXT>
  {
    using Type = BindDescriptorBufferEmbeddedSamplersInfoEXT;
  };

  // wrapper struct for struct VkBindDescriptorSetsInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindDescriptorSetsInfo.html
  struct BindDescriptorSetsInfo
  {
    using NativeType = VkBindDescriptorSetsInfo;

    // allowDuplicate == false: at most one instance of this struct may appear in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBindDescriptorSetsInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; counts and pointers are passed separately and are not validated
    // against each other here.
    VULKAN_HPP_CONSTEXPR BindDescriptorSetsInfo( ShaderStageFlags      stageFlags_         = {},
                                                 PipelineLayout        layout_             = {},
                                                 uint32_t              firstSet_           = {},
                                                 uint32_t              descriptorSetCount_ = {},
                                                 const DescriptorSet * pDescriptorSets_    = {},
                                                 uint32_t              dynamicOffsetCount_ = {},
                                                 const uint32_t *      pDynamicOffsets_    = {},
                                                 const void *          pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stageFlags{ stageFlags_ }
      , layout{ layout_ }
      , firstSet{ firstSet_ }
      , descriptorSetCount{ descriptorSetCount_ }
      , pDescriptorSets{ pDescriptorSets_ }
      , dynamicOffsetCount{ dynamicOffsetCount_ }
      , pDynamicOffsets{ pDynamicOffsets_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BindDescriptorSetsInfo( BindDescriptorSetsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility.
    BindDescriptorSetsInfo( VkBindDescriptorSetsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindDescriptorSetsInfo( *reinterpret_cast<BindDescriptorSetsInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives count/pointer pairs from array proxies so they stay
    // consistent. No copies are made — the referenced arrays must outlive the struct's use.
    BindDescriptorSetsInfo( ShaderStageFlags                                     stageFlags_,
                            PipelineLayout                                       layout_,
                            uint32_t                                             firstSet_,
                            ArrayProxyNoTemporaries<const DescriptorSet> const & descriptorSets_,
                            ArrayProxyNoTemporaries<const uint32_t> const &      dynamicOffsets_ = {},
                            const void *                                         pNext_          = nullptr )
      : pNext( pNext_ )
      , stageFlags( stageFlags_ )
      , layout( layout_ )
      , firstSet( firstSet_ )
      , descriptorSetCount( static_cast<uint32_t>( descriptorSets_.size() ) )
      , pDescriptorSets( descriptorSets_.data() )
      , dynamicOffsetCount( static_cast<uint32_t>( dynamicOffsets_.size() ) )
      , pDynamicOffsets( dynamicOffsets_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    BindDescriptorSetsInfo & operator=( BindDescriptorSetsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; relies on layout compatibility.
    BindDescriptorSetsInfo & operator=( VkBindDescriptorSetsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BindDescriptorSetsInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (builder-style) setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setStageFlags( ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      stageFlags = stageFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setLayout( PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
    {
      layout = layout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setFirstSet( uint32_t firstSet_ ) VULKAN_HPP_NOEXCEPT
    {
      firstSet = firstSet_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorSetCount = descriptorSetCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setPDescriptorSets( const DescriptorSet * pDescriptorSets_ ) VULKAN_HPP_NOEXCEPT
    {
      pDescriptorSets = pDescriptorSets_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Array-proxy setter: sets descriptorSetCount and pDescriptorSets together.
    BindDescriptorSetsInfo & setDescriptorSets( ArrayProxyNoTemporaries<const DescriptorSet> const & descriptorSets_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorSetCount = static_cast<uint32_t>( descriptorSets_.size() );
      pDescriptorSets    = descriptorSets_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setDynamicOffsetCount( uint32_t dynamicOffsetCount_ ) VULKAN_HPP_NOEXCEPT
    {
      dynamicOffsetCount = dynamicOffsetCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setPDynamicOffsets( const uint32_t * pDynamicOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      pDynamicOffsets = pDynamicOffsets_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Array-proxy setter: sets dynamicOffsetCount and pDynamicOffsets together.
    BindDescriptorSetsInfo & setDynamicOffsets( ArrayProxyNoTemporaries<const uint32_t> const & dynamicOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      dynamicOffsetCount = static_cast<uint32_t>( dynamicOffsets_.size() );
      pDynamicOffsets    = dynamicOffsets_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C API type (reference and pointer forms).
    operator VkBindDescriptorSetsInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindDescriptorSetsInfo *>( this );
    }

    operator VkBindDescriptorSetsInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindDescriptorSetsInfo *>( this );
    }

    operator VkBindDescriptorSetsInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBindDescriptorSetsInfo *>( this );
    }

    operator VkBindDescriptorSetsInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBindDescriptorSetsInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to every member, used for reflection-based comparison below.
    std::tuple<StructureType const &,
               const void * const &,
               ShaderStageFlags const &,
               PipelineLayout const &,
               uint32_t const &,
               uint32_t const &,
               const DescriptorSet * const &,
               uint32_t const &,
               const uint32_t * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stageFlags, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindDescriptorSetsInfo const & ) const = default;
#else
    // Member-wise equality; array members are compared by pointer value, not contents.
    bool operator==( BindDescriptorSetsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageFlags == rhs.stageFlags ) && ( layout == rhs.layout ) && ( firstSet == rhs.firstSet ) &&
             ( descriptorSetCount == rhs.descriptorSetCount ) && ( pDescriptorSets == rhs.pDescriptorSets ) &&
             ( dynamicOffsetCount == rhs.dynamicOffsetCount ) && ( pDynamicOffsets == rhs.pDynamicOffsets );
#  endif
    }

    bool operator!=( BindDescriptorSetsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType         sType              = StructureType::eBindDescriptorSetsInfo;
    const void *          pNext              = {};
    ShaderStageFlags      stageFlags         = {};
    PipelineLayout        layout             = {};
    uint32_t              firstSet           = {};
    uint32_t              descriptorSetCount = {};
    const DescriptorSet * pDescriptorSets    = {};
    uint32_t              dynamicOffsetCount = {};
    const uint32_t *      pDynamicOffsets    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper for generic code (C++20 and up).
  template <>
  struct CppType<VkBindDescriptorSetsInfo>
  {
    using Type = BindDescriptorSetsInfo;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type (e.g. for structure-chain handling).
  template <>
  struct CppType<StructureType, StructureType::eBindDescriptorSetsInfo>
  {
    using Type = BindDescriptorSetsInfo;
  };

  // Backward-compatible alias for the original extension (KHR) name of this struct.
  using BindDescriptorSetsInfoKHR = BindDescriptorSetsInfo;

  // wrapper struct for struct VkOffset2D, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOffset2D.html
  struct Offset2D
  {
    using NativeType = VkOffset2D;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; coordinates default to zero.
    VULKAN_HPP_CONSTEXPR Offset2D( int32_t x_ = {}, int32_t y_ = {} ) VULKAN_HPP_NOEXCEPT
      : x{ x_ }
      , y{ y_ }
    {
    }

    VULKAN_HPP_CONSTEXPR Offset2D( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility with VkOffset2D.
    Offset2D( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT : Offset2D( *reinterpret_cast<Offset2D const *>( &rhs ) ) {}

    Offset2D & operator=( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; relies on layout compatibility.
    Offset2D & operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<Offset2D const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (builder-style) setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 Offset2D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT
    {
      x = x_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Offset2D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT
    {
      y = y_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C API type (reference and pointer forms).
    operator VkOffset2D const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkOffset2D *>( this );
    }

    operator VkOffset2D &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkOffset2D *>( this );
    }

    operator VkOffset2D const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkOffset2D *>( this );
    }

    operator VkOffset2D *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkOffset2D *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to every member, used for reflection-based comparison below.
    std::tuple<int32_t const &, int32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( x, y );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Offset2D const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( x == rhs.x ) && ( y == rhs.y );
#  endif
    }

    bool operator!=( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    int32_t x = {};
    int32_t y = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper for generic code (C++20 and up).
  template <>
  struct CppType<VkOffset2D>
  {
    using Type = Offset2D;
  };
#endif

  // wrapper struct for struct VkRect2D, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRect2D.html
  struct Rect2D
  {
    using NativeType = VkRect2D;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; offset and extent default to zero-initialized values.
    VULKAN_HPP_CONSTEXPR Rect2D( Offset2D offset_ = {}, Extent2D extent_ = {} ) VULKAN_HPP_NOEXCEPT
      : offset{ offset_ }
      , extent{ extent_ }
    {
    }

    VULKAN_HPP_CONSTEXPR Rect2D( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility with VkRect2D.
    Rect2D( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT : Rect2D( *reinterpret_cast<Rect2D const *>( &rhs ) ) {}

    Rect2D & operator=( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; relies on layout compatibility.
    Rect2D & operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<Rect2D const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (builder-style) setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 Rect2D & setOffset( Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Rect2D & setExtent( Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT
    {
      extent = extent_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C API type (reference and pointer forms).
    operator VkRect2D const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRect2D *>( this );
    }

    operator VkRect2D &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRect2D *>( this );
    }

    operator VkRect2D const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRect2D *>( this );
    }

    operator VkRect2D *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRect2D *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to every member, used for reflection-based comparison below.
    std::tuple<Offset2D const &, Extent2D const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( offset, extent );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Rect2D const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( offset == rhs.offset ) && ( extent == rhs.extent );
#  endif
    }

    bool operator!=( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    Offset2D offset = {};
    Extent2D extent = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper for generic code (C++20 and up).
  template <>
  struct CppType<VkRect2D>
  {
    using Type = Rect2D;
  };
#endif

  // wrapper struct for struct VkBindImageMemoryDeviceGroupInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindImageMemoryDeviceGroupInfo.html
  struct BindImageMemoryDeviceGroupInfo
  {
    using NativeType = VkBindImageMemoryDeviceGroupInfo;

    // Generated pNext-chain metadata: the struct's fixed sType enumerant, and whether it
    // may appear more than once in a chain — presumably consumed by the structure-chain
    // utilities defined elsewhere in vulkan.hpp.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBindImageMemoryDeviceGroupInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by its member initializer and has no setter.
    VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( uint32_t         deviceIndexCount_             = {},
                                                         const uint32_t * pDeviceIndices_               = {},
                                                         uint32_t         splitInstanceBindRegionCount_ = {},
                                                         const Rect2D *   pSplitInstanceBindRegions_    = {},
                                                         const void *     pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , deviceIndexCount{ deviceIndexCount_ }
      , pDeviceIndices{ pDeviceIndices_ }
      , splitInstanceBindRegionCount{ splitInstanceBindRegionCount_ }
      , pSplitInstanceBindRegions{ pSplitInstanceBindRegions_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct; the reinterpret_cast is valid only because the wrapper
    // is layout-identical to VkBindImageMemoryDeviceGroupInfo.
    BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindImageMemoryDeviceGroupInfo( *reinterpret_cast<BindImageMemoryDeviceGroupInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience overload: derives the count/pointer pairs from array proxies. Only raw
    // pointers are stored, so the referenced arrays must outlive this struct.
    BindImageMemoryDeviceGroupInfo( ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_,
                                    ArrayProxyNoTemporaries<const Rect2D> const &   splitInstanceBindRegions_ = {},
                                    const void *                                    pNext_                    = nullptr )
      : pNext( pNext_ )
      , deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) )
      , pDeviceIndices( deviceIndices_.data() )
      , splitInstanceBindRegionCount( static_cast<uint32_t>( splitInstanceBindRegions_.size() ) )
      , pSplitInstanceBindRegions( splitInstanceBindRegions_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    BindImageMemoryDeviceGroupInfo & operator=( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BindImageMemoryDeviceGroupInfo & operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BindImageMemoryDeviceGroupInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters — each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceIndexCount = deviceIndexCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pDeviceIndices = pDeviceIndices_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Proxy-based setter: updates the count and pointer together, keeping them consistent.
    BindImageMemoryDeviceGroupInfo & setDeviceIndices( ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
      pDeviceIndices   = deviceIndices_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      splitInstanceBindRegionCount = splitInstanceBindRegionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPSplitInstanceBindRegions( const Rect2D * pSplitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      pSplitInstanceBindRegions = pSplitInstanceBindRegions_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegions( ArrayProxyNoTemporaries<const Rect2D> const & splitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      splitInstanceBindRegionCount = static_cast<uint32_t>( splitInstanceBindRegions_.size() );
      pSplitInstanceBindRegions    = splitInstanceBindRegions_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkBindImageMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindImageMemoryDeviceGroupInfo *>( this );
    }

    operator VkBindImageMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindImageMemoryDeviceGroupInfo *>( this );
    }

    operator VkBindImageMemoryDeviceGroupInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBindImageMemoryDeviceGroupInfo *>( this );
    }

    operator VkBindImageMemoryDeviceGroupInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBindImageMemoryDeviceGroupInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to every member; drives the comparison operators below.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &, uint32_t const &, const Rect2D * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, deviceIndexCount, pDeviceIndices, splitInstanceBindRegionCount, pSplitInstanceBindRegions );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindImageMemoryDeviceGroupInfo const & ) const = default;
#else
    // Member-wise equality; note that pointer members (pNext, pDeviceIndices,
    // pSplitInstanceBindRegions) compare addresses, not the pointed-to arrays.
    bool operator==( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndexCount == rhs.deviceIndexCount ) && ( pDeviceIndices == rhs.pDeviceIndices ) &&
             ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount ) && ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions );
#  endif
    }

    bool operator!=( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly; do not reorder (the conversions above
    // depend on layout identity).
    StructureType    sType                        = StructureType::eBindImageMemoryDeviceGroupInfo;
    const void *     pNext                        = {};
    uint32_t         deviceIndexCount             = {};
    const uint32_t * pDeviceIndices               = {};
    uint32_t         splitInstanceBindRegionCount = {};
    const Rect2D *   pSplitInstanceBindRegions    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 only: maps the C type to its C++ wrapper for template-based lookup.
  template <>
  struct CppType<VkBindImageMemoryDeviceGroupInfo>
  {
    using Type = BindImageMemoryDeviceGroupInfo;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type — presumably used by the
  // structure-chain utilities defined elsewhere in vulkan.hpp.
  template <>
  struct CppType<StructureType, StructureType::eBindImageMemoryDeviceGroupInfo>
  {
    using Type = BindImageMemoryDeviceGroupInfo;
  };

  // KHR-suffixed alias of the core struct, kept so extension-based code still compiles.
  using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo;

  // wrapper struct for struct VkBindImageMemoryInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindImageMemoryInfo.html
  struct BindImageMemoryInfo
  {
    using NativeType = VkBindImageMemoryInfo;

    // Generated pNext-chain metadata: fixed sType enumerant and duplicate policy.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBindImageMemoryInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by its member initializer and has no setter.
    VULKAN_HPP_CONSTEXPR
      BindImageMemoryInfo( Image image_ = {}, DeviceMemory memory_ = {}, DeviceSize memoryOffset_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , image{ image_ }
      , memory{ memory_ }
      , memoryOffset{ memoryOffset_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct; relies on the wrapper being layout-identical to it.
    BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BindImageMemoryInfo( *reinterpret_cast<BindImageMemoryInfo const *>( &rhs ) )
    {
    }

    BindImageMemoryInfo & operator=( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BindImageMemoryInfo & operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BindImageMemoryInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters — each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setImage( Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setMemory( DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setMemoryOffset( DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryOffset = memoryOffset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkBindImageMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindImageMemoryInfo *>( this );
    }

    operator VkBindImageMemoryInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindImageMemoryInfo *>( this );
    }

    operator VkBindImageMemoryInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBindImageMemoryInfo *>( this );
    }

    operator VkBindImageMemoryInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBindImageMemoryInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to every member; drives the comparison operators below.
    std::tuple<StructureType const &, const void * const &, Image const &, DeviceMemory const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, image, memory, memoryOffset );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindImageMemoryInfo const & ) const = default;
#else
    // Member-wise equality; pNext compares the pointer value, not the chained structs.
    bool operator==( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset );
#  endif
    }

    bool operator!=( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly; do not reorder.
    StructureType sType        = StructureType::eBindImageMemoryInfo;
    const void *  pNext        = {};
    Image         image        = {};
    DeviceMemory  memory       = {};
    DeviceSize    memoryOffset = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 only: maps the C type to its C++ wrapper for template-based lookup.
  template <>
  struct CppType<VkBindImageMemoryInfo>
  {
    using Type = BindImageMemoryInfo;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type — presumably used by the
  // structure-chain utilities defined elsewhere in vulkan.hpp.
  template <>
  struct CppType<StructureType, StructureType::eBindImageMemoryInfo>
  {
    using Type = BindImageMemoryInfo;
  };

  // KHR-suffixed alias of the core struct, kept so extension-based code still compiles.
  using BindImageMemoryInfoKHR = BindImageMemoryInfo;

  // wrapper struct for struct VkBindImageMemorySwapchainInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindImageMemorySwapchainInfoKHR.html
  struct BindImageMemorySwapchainInfoKHR
  {
    using NativeType = VkBindImageMemorySwapchainInfoKHR;

    // Generated pNext-chain metadata: fixed sType enumerant and duplicate policy.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBindImageMemorySwapchainInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by its member initializer and has no setter.
    VULKAN_HPP_CONSTEXPR
      BindImageMemorySwapchainInfoKHR( SwapchainKHR swapchain_ = {}, uint32_t imageIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , swapchain{ swapchain_ }
      , imageIndex{ imageIndex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct; relies on the wrapper being layout-identical to it.
    BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindImageMemorySwapchainInfoKHR( *reinterpret_cast<BindImageMemorySwapchainInfoKHR const *>( &rhs ) )
    {
    }

    BindImageMemorySwapchainInfoKHR & operator=( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BindImageMemorySwapchainInfoKHR & operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BindImageMemorySwapchainInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters — each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setSwapchain( SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchain = swapchain_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setImageIndex( uint32_t imageIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      imageIndex = imageIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkBindImageMemorySwapchainInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR *>( this );
    }

    operator VkBindImageMemorySwapchainInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindImageMemorySwapchainInfoKHR *>( this );
    }

    operator VkBindImageMemorySwapchainInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR *>( this );
    }

    operator VkBindImageMemorySwapchainInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBindImageMemorySwapchainInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to every member; drives the comparison operators below.
    std::tuple<StructureType const &, const void * const &, SwapchainKHR const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, swapchain, imageIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindImageMemorySwapchainInfoKHR const & ) const = default;
#else
    // Member-wise equality; pNext compares the pointer value, not the chained structs.
    bool operator==( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && ( imageIndex == rhs.imageIndex );
#  endif
    }

    bool operator!=( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly; do not reorder.
    StructureType sType      = StructureType::eBindImageMemorySwapchainInfoKHR;
    const void *  pNext      = {};
    SwapchainKHR  swapchain  = {};
    uint32_t      imageIndex = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 only: maps the C type to its C++ wrapper for template-based lookup.
  template <>
  struct CppType<VkBindImageMemorySwapchainInfoKHR>
  {
    using Type = BindImageMemorySwapchainInfoKHR;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type — presumably used by the
  // structure-chain utilities defined elsewhere in vulkan.hpp.
  template <>
  struct CppType<StructureType, StructureType::eBindImageMemorySwapchainInfoKHR>
  {
    using Type = BindImageMemorySwapchainInfoKHR;
  };

  // wrapper struct for struct VkBindImagePlaneMemoryInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindImagePlaneMemoryInfo.html
  struct BindImagePlaneMemoryInfo
  {
    using NativeType = VkBindImagePlaneMemoryInfo;

    // Generated pNext-chain metadata: fixed sType enumerant and duplicate policy.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBindImagePlaneMemoryInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; planeAspect defaults to eColor, sType is fixed by its
    // member initializer and has no setter.
    VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( ImageAspectFlagBits planeAspect_ = ImageAspectFlagBits::eColor,
                                                   const void *        pNext_       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , planeAspect{ planeAspect_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct; relies on the wrapper being layout-identical to it.
    BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindImagePlaneMemoryInfo( *reinterpret_cast<BindImagePlaneMemoryInfo const *>( &rhs ) )
    {
    }

    BindImagePlaneMemoryInfo & operator=( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BindImagePlaneMemoryInfo & operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BindImagePlaneMemoryInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters — each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo & setPlaneAspect( ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
    {
      planeAspect = planeAspect_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkBindImagePlaneMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindImagePlaneMemoryInfo *>( this );
    }

    operator VkBindImagePlaneMemoryInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindImagePlaneMemoryInfo *>( this );
    }

    operator VkBindImagePlaneMemoryInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBindImagePlaneMemoryInfo *>( this );
    }

    operator VkBindImagePlaneMemoryInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBindImagePlaneMemoryInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to every member; drives the comparison operators below.
    std::tuple<StructureType const &, const void * const &, ImageAspectFlagBits const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, planeAspect );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindImagePlaneMemoryInfo const & ) const = default;
#else
    // Member-wise equality; pNext compares the pointer value, not the chained structs.
    bool operator==( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( planeAspect == rhs.planeAspect );
#  endif
    }

    bool operator!=( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly; do not reorder.
    StructureType       sType       = StructureType::eBindImagePlaneMemoryInfo;
    const void *        pNext       = {};
    ImageAspectFlagBits planeAspect = ImageAspectFlagBits::eColor;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 only: maps the C type to its C++ wrapper for template-based lookup.
  template <>
  struct CppType<VkBindImagePlaneMemoryInfo>
  {
    using Type = BindImagePlaneMemoryInfo;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type — presumably used by the
  // structure-chain utilities defined elsewhere in vulkan.hpp.
  template <>
  struct CppType<StructureType, StructureType::eBindImagePlaneMemoryInfo>
  {
    using Type = BindImagePlaneMemoryInfo;
  };

  // KHR-suffixed alias of the core struct, kept so extension-based code still compiles.
  using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo;

  // wrapper struct for struct VkBindIndexBufferIndirectCommandEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindIndexBufferIndirectCommandEXT.html
  struct BindIndexBufferIndirectCommandEXT
  {
    using NativeType = VkBindIndexBufferIndirectCommandEXT;

    // Plain command struct: unlike the *Info structs above it has no sType/pNext pair,
    // so there is no chain metadata and no CppType<StructureType, ...> registration.
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; indexType defaults to eUint16.
    VULKAN_HPP_CONSTEXPR
      BindIndexBufferIndirectCommandEXT( DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, IndexType indexType_ = IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT
      : bufferAddress{ bufferAddress_ }
      , size{ size_ }
      , indexType{ indexType_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandEXT( BindIndexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct; relies on the wrapper being layout-identical to it.
    BindIndexBufferIndirectCommandEXT( VkBindIndexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindIndexBufferIndirectCommandEXT( *reinterpret_cast<BindIndexBufferIndirectCommandEXT const *>( &rhs ) )
    {
    }

    BindIndexBufferIndirectCommandEXT & operator=( BindIndexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BindIndexBufferIndirectCommandEXT & operator=( VkBindIndexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BindIndexBufferIndirectCommandEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters — each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandEXT & setBufferAddress( DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferAddress = bufferAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandEXT & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandEXT & setIndexType( IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
    {
      indexType = indexType_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkBindIndexBufferIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindIndexBufferIndirectCommandEXT *>( this );
    }

    operator VkBindIndexBufferIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindIndexBufferIndirectCommandEXT *>( this );
    }

    operator VkBindIndexBufferIndirectCommandEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBindIndexBufferIndirectCommandEXT *>( this );
    }

    operator VkBindIndexBufferIndirectCommandEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBindIndexBufferIndirectCommandEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to every member; drives the comparison operators below.
    std::tuple<DeviceAddress const &, uint32_t const &, IndexType const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( bufferAddress, size, indexType );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindIndexBufferIndirectCommandEXT const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( BindIndexBufferIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( indexType == rhs.indexType );
#  endif
    }

    bool operator!=( BindIndexBufferIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly; do not reorder.
    DeviceAddress bufferAddress = {};
    uint32_t      size          = {};
    IndexType     indexType     = IndexType::eUint16;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 only: maps the C type to its C++ wrapper for template-based lookup.
  template <>
  struct CppType<VkBindIndexBufferIndirectCommandEXT>
  {
    using Type = BindIndexBufferIndirectCommandEXT;
  };
#endif

  // wrapper struct for struct VkBindIndexBufferIndirectCommandNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindIndexBufferIndirectCommandNV.html
  struct BindIndexBufferIndirectCommandNV
  {
    using NativeType = VkBindIndexBufferIndirectCommandNV;

    // Plain command struct (no sType/pNext); NV counterpart of
    // BindIndexBufferIndirectCommandEXT with an identical member layout.
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; indexType defaults to eUint16.
    VULKAN_HPP_CONSTEXPR
      BindIndexBufferIndirectCommandNV( DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, IndexType indexType_ = IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT
      : bufferAddress{ bufferAddress_ }
      , size{ size_ }
      , indexType{ indexType_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct; relies on the wrapper being layout-identical to it.
    BindIndexBufferIndirectCommandNV( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindIndexBufferIndirectCommandNV( *reinterpret_cast<BindIndexBufferIndirectCommandNV const *>( &rhs ) )
    {
    }

    BindIndexBufferIndirectCommandNV & operator=( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BindIndexBufferIndirectCommandNV & operator=( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BindIndexBufferIndirectCommandNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters — each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & setBufferAddress( DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferAddress = bufferAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & setIndexType( IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
    {
      indexType = indexType_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkBindIndexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindIndexBufferIndirectCommandNV *>( this );
    }

    operator VkBindIndexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindIndexBufferIndirectCommandNV *>( this );
    }

    operator VkBindIndexBufferIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBindIndexBufferIndirectCommandNV *>( this );
    }

    operator VkBindIndexBufferIndirectCommandNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBindIndexBufferIndirectCommandNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to every member; drives the comparison operators below.
    std::tuple<DeviceAddress const &, uint32_t const &, IndexType const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( bufferAddress, size, indexType );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindIndexBufferIndirectCommandNV const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( BindIndexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( indexType == rhs.indexType );
#  endif
    }

    bool operator!=( BindIndexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly; do not reorder.
    DeviceAddress bufferAddress = {};
    uint32_t      size          = {};
    IndexType     indexType     = IndexType::eUint16;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 only: maps the C type to its C++ wrapper for template-based lookup.
  template <>
  struct CppType<VkBindIndexBufferIndirectCommandNV>
  {
    using Type = BindIndexBufferIndirectCommandNV;
  };
#endif

  // wrapper struct for struct VkBindMemoryStatus, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindMemoryStatus.html
  struct BindMemoryStatus
  {
    using NativeType = VkBindMemoryStatus;

    // Generated pNext-chain metadata: fixed sType enumerant and duplicate policy.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBindMemoryStatus;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor. pResult is a non-const Result* — presumably an output
    // slot the implementation writes the bind result into; confirm against the
    // VkBindMemoryStatus spec.
    VULKAN_HPP_CONSTEXPR BindMemoryStatus( Result * pResult_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pResult{ pResult_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BindMemoryStatus( BindMemoryStatus const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct; relies on the wrapper being layout-identical to it.
    BindMemoryStatus( VkBindMemoryStatus const & rhs ) VULKAN_HPP_NOEXCEPT : BindMemoryStatus( *reinterpret_cast<BindMemoryStatus const *>( &rhs ) ) {}

    BindMemoryStatus & operator=( BindMemoryStatus const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BindMemoryStatus & operator=( VkBindMemoryStatus const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BindMemoryStatus const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters — each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 BindMemoryStatus & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindMemoryStatus & setPResult( Result * pResult_ ) VULKAN_HPP_NOEXCEPT
    {
      pResult = pResult_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkBindMemoryStatus const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindMemoryStatus *>( this );
    }

    operator VkBindMemoryStatus &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindMemoryStatus *>( this );
    }

    operator VkBindMemoryStatus const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBindMemoryStatus *>( this );
    }

    operator VkBindMemoryStatus *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBindMemoryStatus *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to every member; drives the comparison operators below.
    std::tuple<StructureType const &, const void * const &, Result * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pResult );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindMemoryStatus const & ) const = default;
#else
    // Member-wise equality; pointer members (pNext, pResult) compare addresses only.
    bool operator==( BindMemoryStatus const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pResult == rhs.pResult );
#  endif
    }

    bool operator!=( BindMemoryStatus const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly; do not reorder.
    StructureType sType   = StructureType::eBindMemoryStatus;
    const void *  pNext   = {};
    Result *      pResult = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 only: maps the C type to its C++ wrapper for template-based lookup.
  template <>
  struct CppType<VkBindMemoryStatus>
  {
    using Type = BindMemoryStatus;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type — presumably used by the
  // structure-chain utilities defined elsewhere in vulkan.hpp.
  template <>
  struct CppType<StructureType, StructureType::eBindMemoryStatus>
  {
    using Type = BindMemoryStatus;
  };

  // KHR-suffixed alias of the core struct, kept so extension-based code still compiles.
  using BindMemoryStatusKHR = BindMemoryStatus;

  // wrapper struct for struct VkBindPipelineIndirectCommandNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindPipelineIndirectCommandNV.html
  struct BindPipelineIndirectCommandNV
  {
    using NativeType = VkBindPipelineIndirectCommandNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor for the single-member command struct.
    VULKAN_HPP_CONSTEXPR BindPipelineIndirectCommandNV( DeviceAddress pipelineAddress_ = {} ) VULKAN_HPP_NOEXCEPT : pipelineAddress{ pipelineAddress_ } {}

    VULKAN_HPP_CONSTEXPR BindPipelineIndirectCommandNV( BindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct; relies on the wrapper being layout-identical to it.
    BindPipelineIndirectCommandNV( VkBindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindPipelineIndirectCommandNV( *reinterpret_cast<BindPipelineIndirectCommandNV const *>( &rhs ) )
    {
    }

    BindPipelineIndirectCommandNV & operator=( BindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BindPipelineIndirectCommandNV & operator=( VkBindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BindPipelineIndirectCommandNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 BindPipelineIndirectCommandNV & setPipelineAddress( DeviceAddress pipelineAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineAddress = pipelineAddress_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkBindPipelineIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindPipelineIndirectCommandNV *>( this );
    }

    operator VkBindPipelineIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindPipelineIndirectCommandNV *>( this );
    }

    operator VkBindPipelineIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBindPipelineIndirectCommandNV *>( this );
    }

    operator VkBindPipelineIndirectCommandNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBindPipelineIndirectCommandNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<DeviceAddress const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( pipelineAddress );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindPipelineIndirectCommandNV const & ) const = default;
#else
    bool operator==( BindPipelineIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( pipelineAddress == rhs.pipelineAddress );
#  endif
    }

    bool operator!=( BindPipelineIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    DeviceAddress pipelineAddress = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type VkBindPipelineIndirectCommandNV to its C++ wrapper (C++20 and up)
  template <>
  struct CppType<VkBindPipelineIndirectCommandNV>
  {
    using Type = BindPipelineIndirectCommandNV;
  };
#endif

  // wrapper struct for struct VkBindShaderGroupIndirectCommandNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindShaderGroupIndirectCommandNV.html
  // NOTE: layout must stay identical to the C struct; all conversions below reinterpret_cast this object.
  struct BindShaderGroupIndirectCommandNV
  {
    using NativeType = VkBindShaderGroupIndirectCommandNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( uint32_t groupIndex_ = {} ) VULKAN_HPP_NOEXCEPT : groupIndex{ groupIndex_ } {}

    VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct, relying on the layout-identical wrapper
    BindShaderGroupIndirectCommandNV( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindShaderGroupIndirectCommandNV( *reinterpret_cast<BindShaderGroupIndirectCommandNV const *>( &rhs ) )
    {
    }

    BindShaderGroupIndirectCommandNV & operator=( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (bitwise, via the layout-identical wrapper)
    BindShaderGroupIndirectCommandNV & operator=( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BindShaderGroupIndirectCommandNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setter: returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 BindShaderGroupIndirectCommandNV & setGroupIndex( uint32_t groupIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      groupIndex = groupIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native Vulkan type, by reference and by pointer
    operator VkBindShaderGroupIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindShaderGroupIndirectCommandNV *>( this );
    }

    operator VkBindShaderGroupIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindShaderGroupIndirectCommandNV *>( this );
    }

    operator VkBindShaderGroupIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBindShaderGroupIndirectCommandNV *>( this );
    }

    operator VkBindShaderGroupIndirectCommandNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBindShaderGroupIndirectCommandNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // expose the members as a tuple of references (used by the reflection-based operator==)
    std::tuple<uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( groupIndex );
    }
#endif

    // member-wise comparison: defaulted <=> where available, hand-written ==/!= otherwise
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindShaderGroupIndirectCommandNV const & ) const = default;
#else
    bool operator==( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( groupIndex == rhs.groupIndex );
#  endif
    }

    bool operator!=( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t groupIndex = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type VkBindShaderGroupIndirectCommandNV to its C++ wrapper (C++20 and up)
  template <>
  struct CppType<VkBindShaderGroupIndirectCommandNV>
  {
    using Type = BindShaderGroupIndirectCommandNV;
  };
#endif

  // wrapper struct for struct VkSparseMemoryBind, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseMemoryBind.html
  // NOTE: layout must stay identical to the C struct; all conversions below reinterpret_cast this object.
  struct SparseMemoryBind
  {
    using NativeType = VkSparseMemoryBind;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SparseMemoryBind( DeviceSize            resourceOffset_ = {},
                                           DeviceSize            size_           = {},
                                           DeviceMemory          memory_         = {},
                                           DeviceSize            memoryOffset_   = {},
                                           SparseMemoryBindFlags flags_          = {} ) VULKAN_HPP_NOEXCEPT
      : resourceOffset{ resourceOffset_ }
      , size{ size_ }
      , memory{ memory_ }
      , memoryOffset{ memoryOffset_ }
      , flags{ flags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SparseMemoryBind( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct, relying on the layout-identical wrapper
    SparseMemoryBind( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT : SparseMemoryBind( *reinterpret_cast<SparseMemoryBind const *>( &rhs ) ) {}

    SparseMemoryBind & operator=( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (bitwise, via the layout-identical wrapper)
    SparseMemoryBind & operator=( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SparseMemoryBind const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setResourceOffset( DeviceSize resourceOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      resourceOffset = resourceOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setSize( DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setMemory( DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setMemoryOffset( DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryOffset = memoryOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setFlags( SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native Vulkan type, by reference and by pointer
    operator VkSparseMemoryBind const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSparseMemoryBind *>( this );
    }

    operator VkSparseMemoryBind &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSparseMemoryBind *>( this );
    }

    operator VkSparseMemoryBind const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSparseMemoryBind *>( this );
    }

    operator VkSparseMemoryBind *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSparseMemoryBind *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // expose the members as a tuple of references (used by the reflection-based operator==)
    std::tuple<DeviceSize const &, DeviceSize const &, DeviceMemory const &, DeviceSize const &, SparseMemoryBindFlags const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( resourceOffset, size, memory, memoryOffset, flags );
    }
#endif

    // member-wise comparison: defaulted <=> where available, hand-written ==/!= otherwise
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SparseMemoryBind const & ) const = default;
#else
    bool operator==( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( resourceOffset == rhs.resourceOffset ) && ( size == rhs.size ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ) &&
             ( flags == rhs.flags );
#  endif
    }

    bool operator!=( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    DeviceSize            resourceOffset = {};
    DeviceSize            size           = {};
    DeviceMemory          memory         = {};
    DeviceSize            memoryOffset   = {};
    SparseMemoryBindFlags flags          = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type VkSparseMemoryBind to its C++ wrapper (C++20 and up)
  template <>
  struct CppType<VkSparseMemoryBind>
  {
    using Type = SparseMemoryBind;
  };
#endif

  // wrapper struct for struct VkSparseBufferMemoryBindInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseBufferMemoryBindInfo.html
  // NOTE: layout must stay identical to the C struct; all conversions below reinterpret_cast this object.
  struct SparseBufferMemoryBindInfo
  {
    using NativeType = VkSparseBufferMemoryBindInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( Buffer buffer_ = {}, uint32_t bindCount_ = {}, const SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT
      : buffer{ buffer_ }
      , bindCount{ bindCount_ }
      , pBinds{ pBinds_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct, relying on the layout-identical wrapper
    SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SparseBufferMemoryBindInfo( *reinterpret_cast<SparseBufferMemoryBindInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // enhanced-mode constructor: derives bindCount/pBinds from the proxy; only the pointer is
    // stored, so the bound array must outlive any use of this struct
    SparseBufferMemoryBindInfo( Buffer buffer_, ArrayProxyNoTemporaries<const SparseMemoryBind> const & binds_ )
      : buffer( buffer_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SparseBufferMemoryBindInfo & operator=( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (bitwise, via the layout-identical wrapper)
    SparseBufferMemoryBindInfo & operator=( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SparseBufferMemoryBindInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & setBuffer( Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
    {
      bindCount = bindCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & setPBinds( const SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT
    {
      pBinds = pBinds_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // enhanced-mode setter: sets bindCount and pBinds together from one array proxy
    SparseBufferMemoryBindInfo & setBinds( ArrayProxyNoTemporaries<const SparseMemoryBind> const & binds_ ) VULKAN_HPP_NOEXCEPT
    {
      bindCount = static_cast<uint32_t>( binds_.size() );
      pBinds    = binds_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native Vulkan type, by reference and by pointer
    operator VkSparseBufferMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSparseBufferMemoryBindInfo *>( this );
    }

    operator VkSparseBufferMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSparseBufferMemoryBindInfo *>( this );
    }

    operator VkSparseBufferMemoryBindInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSparseBufferMemoryBindInfo *>( this );
    }

    operator VkSparseBufferMemoryBindInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSparseBufferMemoryBindInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // expose the members as a tuple of references (used by the reflection-based operator==)
    std::tuple<Buffer const &, uint32_t const &, const SparseMemoryBind * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( buffer, bindCount, pBinds );
    }
#endif

    // member-wise comparison (pBinds is compared as a pointer, not element-wise)
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SparseBufferMemoryBindInfo const & ) const = default;
#else
    bool operator==( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( buffer == rhs.buffer ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds );
#  endif
    }

    bool operator!=( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    Buffer                   buffer    = {};
    uint32_t                 bindCount = {};
    const SparseMemoryBind * pBinds    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type VkSparseBufferMemoryBindInfo to its C++ wrapper (C++20 and up)
  template <>
  struct CppType<VkSparseBufferMemoryBindInfo>
  {
    using Type = SparseBufferMemoryBindInfo;
  };
#endif

  // wrapper struct for struct VkSparseImageOpaqueMemoryBindInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageOpaqueMemoryBindInfo.html
  // NOTE: layout must stay identical to the C struct; all conversions below reinterpret_cast this object.
  struct SparseImageOpaqueMemoryBindInfo
  {
    using NativeType = VkSparseImageOpaqueMemoryBindInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      SparseImageOpaqueMemoryBindInfo( Image image_ = {}, uint32_t bindCount_ = {}, const SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT
      : image{ image_ }
      , bindCount{ bindCount_ }
      , pBinds{ pBinds_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct, relying on the layout-identical wrapper
    SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SparseImageOpaqueMemoryBindInfo( *reinterpret_cast<SparseImageOpaqueMemoryBindInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // enhanced-mode constructor: derives bindCount/pBinds from the proxy; only the pointer is
    // stored, so the bound array must outlive any use of this struct
    SparseImageOpaqueMemoryBindInfo( Image image_, ArrayProxyNoTemporaries<const SparseMemoryBind> const & binds_ )
      : image( image_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SparseImageOpaqueMemoryBindInfo & operator=( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (bitwise, via the layout-identical wrapper)
    SparseImageOpaqueMemoryBindInfo & operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SparseImageOpaqueMemoryBindInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & setImage( Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
    {
      bindCount = bindCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & setPBinds( const SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT
    {
      pBinds = pBinds_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // enhanced-mode setter: sets bindCount and pBinds together from one array proxy
    SparseImageOpaqueMemoryBindInfo & setBinds( ArrayProxyNoTemporaries<const SparseMemoryBind> const & binds_ ) VULKAN_HPP_NOEXCEPT
    {
      bindCount = static_cast<uint32_t>( binds_.size() );
      pBinds    = binds_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native Vulkan type, by reference and by pointer
    operator VkSparseImageOpaqueMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSparseImageOpaqueMemoryBindInfo *>( this );
    }

    operator VkSparseImageOpaqueMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSparseImageOpaqueMemoryBindInfo *>( this );
    }

    operator VkSparseImageOpaqueMemoryBindInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSparseImageOpaqueMemoryBindInfo *>( this );
    }

    operator VkSparseImageOpaqueMemoryBindInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSparseImageOpaqueMemoryBindInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // expose the members as a tuple of references (used by the reflection-based operator==)
    std::tuple<Image const &, uint32_t const &, const SparseMemoryBind * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( image, bindCount, pBinds );
    }
#endif

    // member-wise comparison (pBinds is compared as a pointer, not element-wise)
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SparseImageOpaqueMemoryBindInfo const & ) const = default;
#else
    bool operator==( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( image == rhs.image ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds );
#  endif
    }

    bool operator!=( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    Image                    image     = {};
    uint32_t                 bindCount = {};
    const SparseMemoryBind * pBinds    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type VkSparseImageOpaqueMemoryBindInfo to its C++ wrapper (C++20 and up)
  template <>
  struct CppType<VkSparseImageOpaqueMemoryBindInfo>
  {
    using Type = SparseImageOpaqueMemoryBindInfo;
  };
#endif

  // wrapper struct for struct VkImageSubresource, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageSubresource.html
  // NOTE: layout must stay identical to the C struct; all conversions below reinterpret_cast this object.
  struct ImageSubresource
  {
    using NativeType = VkImageSubresource;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImageSubresource( ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t arrayLayer_ = {} ) VULKAN_HPP_NOEXCEPT
      : aspectMask{ aspectMask_ }
      , mipLevel{ mipLevel_ }
      , arrayLayer{ arrayLayer_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageSubresource( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct, relying on the layout-identical wrapper
    ImageSubresource( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT : ImageSubresource( *reinterpret_cast<ImageSubresource const *>( &rhs ) ) {}

    ImageSubresource & operator=( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (bitwise, via the layout-identical wrapper)
    ImageSubresource & operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageSubresource const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setAspectMask( ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
    {
      aspectMask = aspectMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT
    {
      mipLevel = mipLevel_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setArrayLayer( uint32_t arrayLayer_ ) VULKAN_HPP_NOEXCEPT
    {
      arrayLayer = arrayLayer_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native Vulkan type, by reference and by pointer
    operator VkImageSubresource const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageSubresource *>( this );
    }

    operator VkImageSubresource &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageSubresource *>( this );
    }

    operator VkImageSubresource const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageSubresource *>( this );
    }

    operator VkImageSubresource *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageSubresource *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // expose the members as a tuple of references (used by the reflection-based operator==)
    std::tuple<ImageAspectFlags const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( aspectMask, mipLevel, arrayLayer );
    }
#endif

    // member-wise comparison: defaulted <=> where available, hand-written ==/!= otherwise
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageSubresource const & ) const = default;
#else
    bool operator==( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( aspectMask == rhs.aspectMask ) && ( mipLevel == rhs.mipLevel ) && ( arrayLayer == rhs.arrayLayer );
#  endif
    }

    bool operator!=( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    ImageAspectFlags aspectMask = {};
    uint32_t         mipLevel   = {};
    uint32_t         arrayLayer = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type VkImageSubresource to its C++ wrapper (C++20 and up)
  template <>
  struct CppType<VkImageSubresource>
  {
    using Type = ImageSubresource;
  };
#endif

  // wrapper struct for struct VkOffset3D, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOffset3D.html
  // NOTE: layout must stay identical to the C struct; all conversions below reinterpret_cast this object.
  struct Offset3D
  {
    using NativeType = VkOffset3D;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR Offset3D( int32_t x_ = {}, int32_t y_ = {}, int32_t z_ = {} ) VULKAN_HPP_NOEXCEPT
      : x{ x_ }
      , y{ y_ }
      , z{ z_ }
    {
    }

    VULKAN_HPP_CONSTEXPR Offset3D( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct, relying on the layout-identical wrapper
    Offset3D( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT : Offset3D( *reinterpret_cast<Offset3D const *>( &rhs ) ) {}

    // widen a 2D offset to 3D (z defaults to 0); explicit to avoid accidental conversions
    explicit Offset3D( Offset2D const & offset2D, int32_t z_ = {} ) : x( offset2D.x ), y( offset2D.y ), z( z_ ) {}

    Offset3D & operator=( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (bitwise, via the layout-identical wrapper)
    Offset3D & operator=( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<Offset3D const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 Offset3D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT
    {
      x = x_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Offset3D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT
    {
      y = y_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Offset3D & setZ( int32_t z_ ) VULKAN_HPP_NOEXCEPT
    {
      z = z_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native Vulkan type, by reference and by pointer
    operator VkOffset3D const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkOffset3D *>( this );
    }

    operator VkOffset3D &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkOffset3D *>( this );
    }

    operator VkOffset3D const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkOffset3D *>( this );
    }

    operator VkOffset3D *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkOffset3D *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // expose the members as a tuple of references (used by the reflection-based operator==)
    std::tuple<int32_t const &, int32_t const &, int32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( x, y, z );
    }
#endif

    // member-wise comparison: defaulted <=> where available, hand-written ==/!= otherwise
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Offset3D const & ) const = default;
#else
    bool operator==( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z );
#  endif
    }

    bool operator!=( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    int32_t x = {};
    int32_t y = {};
    int32_t z = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type VkOffset3D to its C++ wrapper (C++20 and up)
  template <>
  struct CppType<VkOffset3D>
  {
    using Type = Offset3D;
  };
#endif

  // wrapper struct for struct VkExtent3D, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExtent3D.html
  // NOTE: layout must stay identical to the C struct; all conversions below reinterpret_cast this object.
  struct Extent3D
  {
    using NativeType = VkExtent3D;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR Extent3D( uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT
      : width{ width_ }
      , height{ height_ }
      , depth{ depth_ }
    {
    }

    VULKAN_HPP_CONSTEXPR Extent3D( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct, relying on the layout-identical wrapper
    Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT : Extent3D( *reinterpret_cast<Extent3D const *>( &rhs ) ) {}

    // widen a 2D extent to 3D (depth defaults to 0); explicit to avoid accidental conversions
    explicit Extent3D( Extent2D const & extent2D, uint32_t depth_ = {} ) : width( extent2D.width ), height( extent2D.height ), depth( depth_ ) {}

    Extent3D & operator=( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (bitwise, via the layout-identical wrapper)
    Extent3D & operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<Extent3D const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 Extent3D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
    {
      width = width_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Extent3D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
    {
      height = height_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Extent3D & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
    {
      depth = depth_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native Vulkan type, by reference and by pointer
    operator VkExtent3D const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExtent3D *>( this );
    }

    operator VkExtent3D &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExtent3D *>( this );
    }

    operator VkExtent3D const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExtent3D *>( this );
    }

    operator VkExtent3D *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExtent3D *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // expose the members as a tuple of references (used by the reflection-based operator==)
    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( width, height, depth );
    }
#endif

    // member-wise comparison: defaulted <=> where available, hand-written ==/!= otherwise
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Extent3D const & ) const = default;
#else
    bool operator==( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( width == rhs.width ) && ( height == rhs.height ) && ( depth == rhs.depth );
#  endif
    }

    bool operator!=( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t width  = {};
    uint32_t height = {};
    uint32_t depth  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type VkExtent3D to its C++ wrapper (C++20 and up)
  template <>
  struct CppType<VkExtent3D>
  {
    using Type = Extent3D;
  };
#endif

  // wrapper struct for struct VkSparseImageMemoryBind, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageMemoryBind.html
  // NOTE: layout must stay identical to the C struct; all conversions below reinterpret_cast this object.
  struct SparseImageMemoryBind
  {
    using NativeType = VkSparseImageMemoryBind;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( ImageSubresource      subresource_  = {},
                                                Offset3D              offset_       = {},
                                                Extent3D              extent_       = {},
                                                DeviceMemory          memory_       = {},
                                                DeviceSize            memoryOffset_ = {},
                                                SparseMemoryBindFlags flags_        = {} ) VULKAN_HPP_NOEXCEPT
      : subresource{ subresource_ }
      , offset{ offset_ }
      , extent{ extent_ }
      , memory{ memory_ }
      , memoryOffset{ memoryOffset_ }
      , flags{ flags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct, relying on the layout-identical wrapper
    SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
      : SparseImageMemoryBind( *reinterpret_cast<SparseImageMemoryBind const *>( &rhs ) )
    {
    }

    SparseImageMemoryBind & operator=( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (bitwise, via the layout-identical wrapper)
    SparseImageMemoryBind & operator=( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SparseImageMemoryBind const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setSubresource( ImageSubresource const & subresource_ ) VULKAN_HPP_NOEXCEPT
    {
      subresource = subresource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setOffset( Offset3D const & offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setExtent( Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
    {
      extent = extent_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setMemory( DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setMemoryOffset( DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryOffset = memoryOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setFlags( SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native Vulkan type, by reference and by pointer
    operator VkSparseImageMemoryBind const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSparseImageMemoryBind *>( this );
    }

    operator VkSparseImageMemoryBind &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSparseImageMemoryBind *>( this );
    }

    operator VkSparseImageMemoryBind const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSparseImageMemoryBind *>( this );
    }

    operator VkSparseImageMemoryBind *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSparseImageMemoryBind *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // expose the members as a tuple of references (used by the reflection-based operator==)
    std::tuple<ImageSubresource const &, Offset3D const &, Extent3D const &, DeviceMemory const &, DeviceSize const &, SparseMemoryBindFlags const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( subresource, offset, extent, memory, memoryOffset, flags );
    }
#endif

    // member-wise comparison: defaulted <=> where available, hand-written ==/!= otherwise
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SparseImageMemoryBind const & ) const = default;
#else
    bool operator==( SparseImageMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( subresource == rhs.subresource ) && ( offset == rhs.offset ) && ( extent == rhs.extent ) && ( memory == rhs.memory ) &&
             ( memoryOffset == rhs.memoryOffset ) && ( flags == rhs.flags );
#  endif
    }

    bool operator!=( SparseImageMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    ImageSubresource      subresource  = {};
    Offset3D              offset       = {};
    Extent3D              extent       = {};
    DeviceMemory          memory       = {};
    DeviceSize            memoryOffset = {};
    SparseMemoryBindFlags flags        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkSparseImageMemoryBind type to its C++ wrapper type.
  template <>
  struct CppType<VkSparseImageMemoryBind>
  {
    using Type = SparseImageMemoryBind;
  };
#endif

  // wrapper struct for struct VkSparseImageMemoryBindInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageMemoryBindInfo.html
  struct SparseImageMemoryBindInfo
  {
    using NativeType = VkSparseImageMemoryBindInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      SparseImageMemoryBindInfo( Image image_ = {}, uint32_t bindCount_ = {}, const SparseImageMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT
      : image{ image_ }
      , bindCount{ bindCount_ }
      , pBinds{ pBinds_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SparseImageMemoryBindInfo( *reinterpret_cast<SparseImageMemoryBindInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives bindCount and pBinds from an array proxy of binds.
    SparseImageMemoryBindInfo( Image image_, ArrayProxyNoTemporaries<const SparseImageMemoryBind> const & binds_ )
      : image( image_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SparseImageMemoryBindInfo & operator=( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    SparseImageMemoryBindInfo & operator=( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SparseImageMemoryBindInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns a single member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & setImage( Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
    {
      bindCount = bindCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & setPBinds( const SparseImageMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT
    {
      pBinds = pBinds_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets bindCount and pBinds together from an array proxy, keeping them consistent.
    SparseImageMemoryBindInfo & setBinds( ArrayProxyNoTemporaries<const SparseImageMemoryBind> const & binds_ ) VULKAN_HPP_NOEXCEPT
    {
      bindCount = static_cast<uint32_t>( binds_.size() );
      pBinds    = binds_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type; rely on layout compatibility with
    // VkSparseImageMemoryBindInfo (member list below mirrors the native struct).
    operator VkSparseImageMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSparseImageMemoryBindInfo *>( this );
    }

    operator VkSparseImageMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSparseImageMemoryBindInfo *>( this );
    }

    operator VkSparseImageMemoryBindInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSparseImageMemoryBindInfo *>( this );
    }

    operator VkSparseImageMemoryBindInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSparseImageMemoryBindInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used by operator== below when reflection is enabled.
    std::tuple<Image const &, uint32_t const &, const SparseImageMemoryBind * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( image, bindCount, pBinds );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SparseImageMemoryBindInfo const & ) const = default;
#else
    // Note: equality compares the pBinds pointer itself, not the pointed-to array.
    bool operator==( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( image == rhs.image ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds );
#  endif
    }

    bool operator!=( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkSparseImageMemoryBindInfo exactly; do not reorder.
    Image                         image     = {};
    uint32_t                      bindCount = {};
    const SparseImageMemoryBind * pBinds    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkSparseImageMemoryBindInfo type to its C++ wrapper type.
  template <>
  struct CppType<VkSparseImageMemoryBindInfo>
  {
    using Type = SparseImageMemoryBindInfo;
  };
#endif

  // wrapper struct for struct VkBindSparseInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindSparseInfo.html
  struct BindSparseInfo
  {
    using NativeType = VkBindSparseInfo;

    // The sType value corresponding to this structure; duplicates are not allowed.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBindSparseInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BindSparseInfo( uint32_t                                waitSemaphoreCount_   = {},
                                         const Semaphore *                       pWaitSemaphores_      = {},
                                         uint32_t                                bufferBindCount_      = {},
                                         const SparseBufferMemoryBindInfo *      pBufferBinds_         = {},
                                         uint32_t                                imageOpaqueBindCount_ = {},
                                         const SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_    = {},
                                         uint32_t                                imageBindCount_       = {},
                                         const SparseImageMemoryBindInfo *       pImageBinds_          = {},
                                         uint32_t                                signalSemaphoreCount_ = {},
                                         const Semaphore *                       pSignalSemaphores_    = {},
                                         const void *                            pNext_                = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , waitSemaphoreCount{ waitSemaphoreCount_ }
      , pWaitSemaphores{ pWaitSemaphores_ }
      , bufferBindCount{ bufferBindCount_ }
      , pBufferBinds{ pBufferBinds_ }
      , imageOpaqueBindCount{ imageOpaqueBindCount_ }
      , pImageOpaqueBinds{ pImageOpaqueBinds_ }
      , imageBindCount{ imageBindCount_ }
      , pImageBinds{ pImageBinds_ }
      , signalSemaphoreCount{ signalSemaphoreCount_ }
      , pSignalSemaphores{ pSignalSemaphores_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BindSparseInfo( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindSparseInfo( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BindSparseInfo( *reinterpret_cast<BindSparseInfo const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives each count/pointer pair from the corresponding array proxy.
    BindSparseInfo( ArrayProxyNoTemporaries<const Semaphore> const &                       waitSemaphores_,
                    ArrayProxyNoTemporaries<const SparseBufferMemoryBindInfo> const &      bufferBinds_      = {},
                    ArrayProxyNoTemporaries<const SparseImageOpaqueMemoryBindInfo> const & imageOpaqueBinds_ = {},
                    ArrayProxyNoTemporaries<const SparseImageMemoryBindInfo> const &       imageBinds_       = {},
                    ArrayProxyNoTemporaries<const Semaphore> const &                       signalSemaphores_ = {},
                    const void *                                                           pNext_            = nullptr )
      : pNext( pNext_ )
      , waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) )
      , pWaitSemaphores( waitSemaphores_.data() )
      , bufferBindCount( static_cast<uint32_t>( bufferBinds_.size() ) )
      , pBufferBinds( bufferBinds_.data() )
      , imageOpaqueBindCount( static_cast<uint32_t>( imageOpaqueBinds_.size() ) )
      , pImageOpaqueBinds( imageOpaqueBinds_.data() )
      , imageBindCount( static_cast<uint32_t>( imageBinds_.size() ) )
      , pImageBinds( imageBinds_.data() )
      , signalSemaphoreCount( static_cast<uint32_t>( signalSemaphores_.size() ) )
      , pSignalSemaphores( signalSemaphores_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    BindSparseInfo & operator=( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BindSparseInfo & operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BindSparseInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns a single member and returns *this so calls can be chained.
    // The set*Count/setP* pairs can be set independently; the ArrayProxy overloads below
    // set a count and its pointer together, keeping them consistent.
    VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreCount = waitSemaphoreCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPWaitSemaphores( const Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
    {
      pWaitSemaphores = pWaitSemaphores_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BindSparseInfo & setWaitSemaphores( ArrayProxyNoTemporaries<const Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
      pWaitSemaphores    = waitSemaphores_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setBufferBindCount( uint32_t bufferBindCount_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferBindCount = bufferBindCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPBufferBinds( const SparseBufferMemoryBindInfo * pBufferBinds_ ) VULKAN_HPP_NOEXCEPT
    {
      pBufferBinds = pBufferBinds_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BindSparseInfo & setBufferBinds( ArrayProxyNoTemporaries<const SparseBufferMemoryBindInfo> const & bufferBinds_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferBindCount = static_cast<uint32_t>( bufferBinds_.size() );
      pBufferBinds    = bufferBinds_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ ) VULKAN_HPP_NOEXCEPT
    {
      imageOpaqueBindCount = imageOpaqueBindCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPImageOpaqueBinds( const SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT
    {
      pImageOpaqueBinds = pImageOpaqueBinds_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BindSparseInfo & setImageOpaqueBinds( ArrayProxyNoTemporaries<const SparseImageOpaqueMemoryBindInfo> const & imageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT
    {
      imageOpaqueBindCount = static_cast<uint32_t>( imageOpaqueBinds_.size() );
      pImageOpaqueBinds    = imageOpaqueBinds_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setImageBindCount( uint32_t imageBindCount_ ) VULKAN_HPP_NOEXCEPT
    {
      imageBindCount = imageBindCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPImageBinds( const SparseImageMemoryBindInfo * pImageBinds_ ) VULKAN_HPP_NOEXCEPT
    {
      pImageBinds = pImageBinds_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BindSparseInfo & setImageBinds( ArrayProxyNoTemporaries<const SparseImageMemoryBindInfo> const & imageBinds_ ) VULKAN_HPP_NOEXCEPT
    {
      imageBindCount = static_cast<uint32_t>( imageBinds_.size() );
      pImageBinds    = imageBinds_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
    {
      signalSemaphoreCount = signalSemaphoreCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPSignalSemaphores( const Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
    {
      pSignalSemaphores = pSignalSemaphores_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BindSparseInfo & setSignalSemaphores( ArrayProxyNoTemporaries<const Semaphore> const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT
    {
      signalSemaphoreCount = static_cast<uint32_t>( signalSemaphores_.size() );
      pSignalSemaphores    = signalSemaphores_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type; rely on layout compatibility with
    // VkBindSparseInfo (member list below mirrors the native struct).
    operator VkBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindSparseInfo *>( this );
    }

    operator VkBindSparseInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindSparseInfo *>( this );
    }

    operator VkBindSparseInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBindSparseInfo *>( this );
    }

    operator VkBindSparseInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBindSparseInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used by operator== below when reflection is enabled.
    std::tuple<StructureType const &,
               const void * const &,
               uint32_t const &,
               const Semaphore * const &,
               uint32_t const &,
               const SparseBufferMemoryBindInfo * const &,
               uint32_t const &,
               const SparseImageOpaqueMemoryBindInfo * const &,
               uint32_t const &,
               const SparseImageMemoryBindInfo * const &,
               uint32_t const &,
               const Semaphore * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       waitSemaphoreCount,
                       pWaitSemaphores,
                       bufferBindCount,
                       pBufferBinds,
                       imageOpaqueBindCount,
                       pImageOpaqueBinds,
                       imageBindCount,
                       pImageBinds,
                       signalSemaphoreCount,
                       pSignalSemaphores );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindSparseInfo const & ) const = default;
#else
    // Note: equality compares the pointer members themselves, not the pointed-to arrays.
    bool operator==( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) &&
             ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( bufferBindCount == rhs.bufferBindCount ) && ( pBufferBinds == rhs.pBufferBinds ) &&
             ( imageOpaqueBindCount == rhs.imageOpaqueBindCount ) && ( pImageOpaqueBinds == rhs.pImageOpaqueBinds ) &&
             ( imageBindCount == rhs.imageBindCount ) && ( pImageBinds == rhs.pImageBinds ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) &&
             ( pSignalSemaphores == rhs.pSignalSemaphores );
#  endif
    }

    bool operator!=( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkBindSparseInfo exactly; do not reorder.
    StructureType                           sType                = StructureType::eBindSparseInfo;
    const void *                            pNext                = {};
    uint32_t                                waitSemaphoreCount   = {};
    const Semaphore *                       pWaitSemaphores      = {};
    uint32_t                                bufferBindCount      = {};
    const SparseBufferMemoryBindInfo *      pBufferBinds         = {};
    uint32_t                                imageOpaqueBindCount = {};
    const SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds    = {};
    uint32_t                                imageBindCount       = {};
    const SparseImageMemoryBindInfo *       pImageBinds          = {};
    uint32_t                                signalSemaphoreCount = {};
    const Semaphore *                       pSignalSemaphores    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkBindSparseInfo type to its C++ wrapper type.
  template <>
  struct CppType<VkBindSparseInfo>
  {
    using Type = BindSparseInfo;
  };
#endif

  // Maps the eBindSparseInfo sType value back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eBindSparseInfo>
  {
    using Type = BindSparseInfo;
  };

  // wrapper struct for struct VkBindTensorMemoryInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindTensorMemoryInfoARM.html
  struct BindTensorMemoryInfoARM
  {
    using NativeType = VkBindTensorMemoryInfoARM;

    // The sType value corresponding to this structure; duplicates are not allowed.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBindTensorMemoryInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BindTensorMemoryInfoARM( TensorARM    tensor_       = {},
                                                  DeviceMemory memory_       = {},
                                                  DeviceSize   memoryOffset_ = {},
                                                  const void * pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , tensor{ tensor_ }
      , memory{ memory_ }
      , memoryOffset{ memoryOffset_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BindTensorMemoryInfoARM( BindTensorMemoryInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindTensorMemoryInfoARM( VkBindTensorMemoryInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindTensorMemoryInfoARM( *reinterpret_cast<BindTensorMemoryInfoARM const *>( &rhs ) )
    {
    }

    BindTensorMemoryInfoARM & operator=( BindTensorMemoryInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BindTensorMemoryInfoARM & operator=( VkBindTensorMemoryInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BindTensorMemoryInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns a single member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 BindTensorMemoryInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindTensorMemoryInfoARM & setTensor( TensorARM tensor_ ) VULKAN_HPP_NOEXCEPT
    {
      tensor = tensor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindTensorMemoryInfoARM & setMemory( DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindTensorMemoryInfoARM & setMemoryOffset( DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryOffset = memoryOffset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type; rely on layout compatibility with
    // VkBindTensorMemoryInfoARM (member list below mirrors the native struct).
    operator VkBindTensorMemoryInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindTensorMemoryInfoARM *>( this );
    }

    operator VkBindTensorMemoryInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindTensorMemoryInfoARM *>( this );
    }

    operator VkBindTensorMemoryInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBindTensorMemoryInfoARM *>( this );
    }

    operator VkBindTensorMemoryInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBindTensorMemoryInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used by operator== below when reflection is enabled.
    std::tuple<StructureType const &, const void * const &, TensorARM const &, DeviceMemory const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, tensor, memory, memoryOffset );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindTensorMemoryInfoARM const & ) const = default;
#else
    bool operator==( BindTensorMemoryInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tensor == rhs.tensor ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset );
#  endif
    }

    bool operator!=( BindTensorMemoryInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkBindTensorMemoryInfoARM exactly; do not reorder.
    StructureType sType        = StructureType::eBindTensorMemoryInfoARM;
    const void *  pNext        = {};
    TensorARM     tensor       = {};
    DeviceMemory  memory       = {};
    DeviceSize    memoryOffset = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkBindTensorMemoryInfoARM type to its C++ wrapper type.
  template <>
  struct CppType<VkBindTensorMemoryInfoARM>
  {
    using Type = BindTensorMemoryInfoARM;
  };
#endif

  // Maps the eBindTensorMemoryInfoARM sType value back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eBindTensorMemoryInfoARM>
  {
    using Type = BindTensorMemoryInfoARM;
  };

  // wrapper struct for struct VkBindVertexBufferIndirectCommandEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindVertexBufferIndirectCommandEXT.html
  struct BindVertexBufferIndirectCommandEXT
  {
    using NativeType = VkBindVertexBufferIndirectCommandEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandEXT( DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, uint32_t stride_ = {} ) VULKAN_HPP_NOEXCEPT
      : bufferAddress{ bufferAddress_ }
      , size{ size_ }
      , stride{ stride_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandEXT( BindVertexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindVertexBufferIndirectCommandEXT( VkBindVertexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindVertexBufferIndirectCommandEXT( *reinterpret_cast<BindVertexBufferIndirectCommandEXT const *>( &rhs ) )
    {
    }

    BindVertexBufferIndirectCommandEXT & operator=( BindVertexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BindVertexBufferIndirectCommandEXT & operator=( VkBindVertexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BindVertexBufferIndirectCommandEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns a single member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandEXT & setBufferAddress( DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferAddress = bufferAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandEXT & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandEXT & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
    {
      stride = stride_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type; rely on layout compatibility with
    // VkBindVertexBufferIndirectCommandEXT (member list below mirrors the native struct).
    operator VkBindVertexBufferIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindVertexBufferIndirectCommandEXT *>( this );
    }

    operator VkBindVertexBufferIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindVertexBufferIndirectCommandEXT *>( this );
    }

    operator VkBindVertexBufferIndirectCommandEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBindVertexBufferIndirectCommandEXT *>( this );
    }

    operator VkBindVertexBufferIndirectCommandEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBindVertexBufferIndirectCommandEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used by operator== below when reflection is enabled.
    std::tuple<DeviceAddress const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( bufferAddress, size, stride );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindVertexBufferIndirectCommandEXT const & ) const = default;
#else
    bool operator==( BindVertexBufferIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( stride == rhs.stride );
#  endif
    }

    bool operator!=( BindVertexBufferIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkBindVertexBufferIndirectCommandEXT exactly; do not reorder.
    DeviceAddress bufferAddress = {};
    uint32_t      size          = {};
    uint32_t      stride        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkBindVertexBufferIndirectCommandEXT type to its C++ wrapper type.
  template <>
  struct CppType<VkBindVertexBufferIndirectCommandEXT>
  {
    using Type = BindVertexBufferIndirectCommandEXT;
  };
#endif

  // wrapper struct for struct VkBindVertexBufferIndirectCommandNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindVertexBufferIndirectCommandNV.html
  struct BindVertexBufferIndirectCommandNV
  {
    using NativeType = VkBindVertexBufferIndirectCommandNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, uint32_t stride_ = {} ) VULKAN_HPP_NOEXCEPT
      : bufferAddress{ bufferAddress_ }
      , size{ size_ }
      , stride{ stride_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindVertexBufferIndirectCommandNV( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindVertexBufferIndirectCommandNV( *reinterpret_cast<BindVertexBufferIndirectCommandNV const *>( &rhs ) )
    {
    }

    BindVertexBufferIndirectCommandNV & operator=( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BindVertexBufferIndirectCommandNV & operator=( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BindVertexBufferIndirectCommandNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns a single member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & setBufferAddress( DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferAddress = bufferAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
    {
      stride = stride_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type; rely on layout compatibility with
    // VkBindVertexBufferIndirectCommandNV (member list below mirrors the native struct).
    operator VkBindVertexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindVertexBufferIndirectCommandNV *>( this );
    }

    operator VkBindVertexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindVertexBufferIndirectCommandNV *>( this );
    }

    operator VkBindVertexBufferIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBindVertexBufferIndirectCommandNV *>( this );
    }

    operator VkBindVertexBufferIndirectCommandNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBindVertexBufferIndirectCommandNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used by operator== below when reflection is enabled.
    std::tuple<DeviceAddress const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( bufferAddress, size, stride );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindVertexBufferIndirectCommandNV const & ) const = default;
#else
    bool operator==( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( stride == rhs.stride );
#  endif
    }

    bool operator!=( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkBindVertexBufferIndirectCommandNV exactly; do not reorder.
    DeviceAddress bufferAddress = {};
    uint32_t      size          = {};
    uint32_t      stride        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkBindVertexBufferIndirectCommandNV type to its C++ wrapper type.
  template <>
  struct CppType<VkBindVertexBufferIndirectCommandNV>
  {
    using Type = BindVertexBufferIndirectCommandNV;
  };
#endif

  // wrapper struct for struct VkBindVideoSessionMemoryInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindVideoSessionMemoryInfoKHR.html
  //
  // C++ wrapper whose members mirror the native struct one-to-one; the reinterpret_cast-based
  // conversions below rely on this layout match to bridge to the C API at zero cost.
  struct BindVideoSessionMemoryInfoKHR
  {
    using NativeType = VkBindVideoSessionMemoryInfoKHR;

    // allowDuplicate: this structure may appear at most once in a pNext chain.
    // structureType: the VkStructureType value carried in sType.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBindVideoSessionMemoryInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; pNext_ comes last so the payload fields can be passed positionally.
    VULKAN_HPP_CONSTEXPR BindVideoSessionMemoryInfoKHR( uint32_t     memoryBindIndex_ = {},
                                                        DeviceMemory memory_          = {},
                                                        DeviceSize   memoryOffset_    = {},
                                                        DeviceSize   memorySize_      = {},
                                                        const void * pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , memoryBindIndex{ memoryBindIndex_ }
      , memory{ memory_ }
      , memoryOffset{ memoryOffset_ }
      , memorySize{ memorySize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BindVideoSessionMemoryInfoKHR( BindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native struct by reinterpreting it as the layout-identical wrapper.
    BindVideoSessionMemoryInfoKHR( VkBindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindVideoSessionMemoryInfoKHR( *reinterpret_cast<BindVideoSessionMemoryInfoKHR const *>( &rhs ) )
    {
    }

    BindVideoSessionMemoryInfoKHR & operator=( BindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native struct (same reinterpret_cast bridge as the converting constructor).
    BindVideoSessionMemoryInfoKHR & operator=( VkBindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BindVideoSessionMemoryInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemoryBindIndex( uint32_t memoryBindIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryBindIndex = memoryBindIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemory( DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemoryOffset( DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryOffset = memoryOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemorySize( DeviceSize memorySize_ ) VULKAN_HPP_NOEXCEPT
    {
      memorySize = memorySize_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference / pointer conversions to the native type, for passing directly to the C API.
    operator VkBindVideoSessionMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( this );
    }

    operator VkBindVideoSessionMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindVideoSessionMemoryInfoKHR *>( this );
    }

    operator VkBindVideoSessionMemoryInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( this );
    }

    operator VkBindVideoSessionMemoryInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBindVideoSessionMemoryInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of const references, used below for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, DeviceMemory const &, DeviceSize const &, DeviceSize const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memoryBindIndex, memory, memoryOffset, memorySize );
    }
#endif

    // Defaulted three-way comparison when available; otherwise hand-rolled memberwise == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindVideoSessionMemoryInfoKHR const & ) const = default;
#else
    bool operator==( BindVideoSessionMemoryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryBindIndex == rhs.memoryBindIndex ) && ( memory == rhs.memory ) &&
             ( memoryOffset == rhs.memoryOffset ) && ( memorySize == rhs.memorySize );
#  endif
    }

    bool operator!=( BindVideoSessionMemoryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members match VkBindVideoSessionMemoryInfoKHR in declaration order; sType is fixed for this type.
    StructureType sType           = StructureType::eBindVideoSessionMemoryInfoKHR;
    const void *  pNext           = {};
    uint32_t      memoryBindIndex = {};
    DeviceMemory  memory          = {};
    DeviceSize    memoryOffset    = {};
    DeviceSize    memorySize      = {};
  };

  // Map the native C type back to this wrapper (C++20 and later).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkBindVideoSessionMemoryInfoKHR>
  {
    using Type = BindVideoSessionMemoryInfoKHR;
  };
#endif

  // Map the sType enumerant back to this wrapper.
  template <>
  struct CppType<StructureType, StructureType::eBindVideoSessionMemoryInfoKHR>
  {
    using Type = BindVideoSessionMemoryInfoKHR;
  };

  // wrapper struct for struct VkBlitImageCubicWeightsInfoQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBlitImageCubicWeightsInfoQCOM.html
  //
  // C++ wrapper whose members mirror the native struct one-to-one; the reinterpret_cast-based
  // conversions below rely on this layout match to bridge to the C API at zero cost.
  struct BlitImageCubicWeightsInfoQCOM
  {
    using NativeType = VkBlitImageCubicWeightsInfoQCOM;

    // allowDuplicate: this structure may appear at most once in a pNext chain.
    // structureType: the VkStructureType value carried in sType.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBlitImageCubicWeightsInfoQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; cubicWeights defaults to eCatmullRom, matching the member default below.
    VULKAN_HPP_CONSTEXPR BlitImageCubicWeightsInfoQCOM( CubicFilterWeightsQCOM cubicWeights_ = CubicFilterWeightsQCOM::eCatmullRom,
                                                        const void *           pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , cubicWeights{ cubicWeights_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BlitImageCubicWeightsInfoQCOM( BlitImageCubicWeightsInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native struct by reinterpreting it as the layout-identical wrapper.
    BlitImageCubicWeightsInfoQCOM( VkBlitImageCubicWeightsInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : BlitImageCubicWeightsInfoQCOM( *reinterpret_cast<BlitImageCubicWeightsInfoQCOM const *>( &rhs ) )
    {
    }

    BlitImageCubicWeightsInfoQCOM & operator=( BlitImageCubicWeightsInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native struct (same reinterpret_cast bridge as the converting constructor).
    BlitImageCubicWeightsInfoQCOM & operator=( VkBlitImageCubicWeightsInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BlitImageCubicWeightsInfoQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 BlitImageCubicWeightsInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BlitImageCubicWeightsInfoQCOM & setCubicWeights( CubicFilterWeightsQCOM cubicWeights_ ) VULKAN_HPP_NOEXCEPT
    {
      cubicWeights = cubicWeights_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference / pointer conversions to the native type, for passing directly to the C API.
    operator VkBlitImageCubicWeightsInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBlitImageCubicWeightsInfoQCOM *>( this );
    }

    operator VkBlitImageCubicWeightsInfoQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBlitImageCubicWeightsInfoQCOM *>( this );
    }

    operator VkBlitImageCubicWeightsInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBlitImageCubicWeightsInfoQCOM *>( this );
    }

    operator VkBlitImageCubicWeightsInfoQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBlitImageCubicWeightsInfoQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of const references, used below for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, CubicFilterWeightsQCOM const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, cubicWeights );
    }
#endif

    // Defaulted three-way comparison when available; otherwise hand-rolled memberwise == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BlitImageCubicWeightsInfoQCOM const & ) const = default;
#else
    bool operator==( BlitImageCubicWeightsInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cubicWeights == rhs.cubicWeights );
#  endif
    }

    bool operator!=( BlitImageCubicWeightsInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members match VkBlitImageCubicWeightsInfoQCOM in declaration order; sType is fixed for this type.
    StructureType          sType        = StructureType::eBlitImageCubicWeightsInfoQCOM;
    const void *           pNext        = {};
    CubicFilterWeightsQCOM cubicWeights = CubicFilterWeightsQCOM::eCatmullRom;
  };

  // Map the native C type back to this wrapper (C++20 and later).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkBlitImageCubicWeightsInfoQCOM>
  {
    using Type = BlitImageCubicWeightsInfoQCOM;
  };
#endif

  // Map the sType enumerant back to this wrapper.
  template <>
  struct CppType<StructureType, StructureType::eBlitImageCubicWeightsInfoQCOM>
  {
    using Type = BlitImageCubicWeightsInfoQCOM;
  };

  // wrapper struct for struct VkImageSubresourceLayers, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageSubresourceLayers.html
  //
  // Plain data struct: unlike the sType-bearing wrappers, this one has no sType/pNext and is
  // not pNext-chainable. Members mirror the native struct one-to-one so the reinterpret_cast
  // conversions below bridge to the C API at zero cost.
  struct ImageSubresourceLayers
  {
    using NativeType = VkImageSubresourceLayers;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all fields value-initialize to zero/empty by default.
    VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( ImageAspectFlags aspectMask_     = {},
                                                 uint32_t         mipLevel_       = {},
                                                 uint32_t         baseArrayLayer_ = {},
                                                 uint32_t         layerCount_     = {} ) VULKAN_HPP_NOEXCEPT
      : aspectMask{ aspectMask_ }
      , mipLevel{ mipLevel_ }
      , baseArrayLayer{ baseArrayLayer_ }
      , layerCount{ layerCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native struct by reinterpreting it as the layout-identical wrapper.
    ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageSubresourceLayers( *reinterpret_cast<ImageSubresourceLayers const *>( &rhs ) )
    {
    }

    ImageSubresourceLayers & operator=( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native struct (same reinterpret_cast bridge as the converting constructor).
    ImageSubresourceLayers & operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageSubresourceLayers const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setAspectMask( ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
    {
      aspectMask = aspectMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT
    {
      mipLevel = mipLevel_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
    {
      baseArrayLayer = baseArrayLayer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
    {
      layerCount = layerCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference / pointer conversions to the native type, for passing directly to the C API.
    operator VkImageSubresourceLayers const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageSubresourceLayers *>( this );
    }

    operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageSubresourceLayers *>( this );
    }

    operator VkImageSubresourceLayers const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageSubresourceLayers *>( this );
    }

    operator VkImageSubresourceLayers *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageSubresourceLayers *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of const references, used below for reflection-based comparison.
    std::tuple<ImageAspectFlags const &, uint32_t const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( aspectMask, mipLevel, baseArrayLayer, layerCount );
    }
#endif

    // Defaulted three-way comparison when available; otherwise hand-rolled memberwise == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageSubresourceLayers const & ) const = default;
#else
    bool operator==( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( aspectMask == rhs.aspectMask ) && ( mipLevel == rhs.mipLevel ) && ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount );
#  endif
    }

    bool operator!=( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members match VkImageSubresourceLayers in declaration order.
    ImageAspectFlags aspectMask     = {};
    uint32_t         mipLevel       = {};
    uint32_t         baseArrayLayer = {};
    uint32_t         layerCount     = {};
  };

  // Map the native C type back to this wrapper (C++20 and later).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkImageSubresourceLayers>
  {
    using Type = ImageSubresourceLayers;
  };
#endif

  // wrapper struct for struct VkImageBlit2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageBlit2.html
  //
  // C++ wrapper whose members mirror the native struct one-to-one; the reinterpret_cast-based
  // conversions below rely on this layout match to bridge to the C API at zero cost.
  // The two-element offset arrays (blit rectangle corners) are stored as ArrayWrapper1D,
  // which is constructible from std::array.
  struct ImageBlit2
  {
    using NativeType = VkImageBlit2;

    // allowDuplicate: this structure may appear at most once in a pNext chain.
    // structureType: the VkStructureType value carried in sType.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageBlit2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; CONSTEXPR_14 because the array members need C++14 constexpr rules.
    VULKAN_HPP_CONSTEXPR_14 ImageBlit2( ImageSubresourceLayers          srcSubresource_ = {},
                                        std::array<Offset3D, 2> const & srcOffsets_     = {},
                                        ImageSubresourceLayers          dstSubresource_ = {},
                                        std::array<Offset3D, 2> const & dstOffsets_     = {},
                                        const void *                    pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcSubresource{ srcSubresource_ }
      , srcOffsets{ srcOffsets_ }
      , dstSubresource{ dstSubresource_ }
      , dstOffsets{ dstOffsets_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 ImageBlit2( ImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native struct by reinterpreting it as the layout-identical wrapper.
    ImageBlit2( VkImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageBlit2( *reinterpret_cast<ImageBlit2 const *>( &rhs ) ) {}

    ImageBlit2 & operator=( ImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native struct (same reinterpret_cast bridge as the converting constructor).
    ImageBlit2 & operator=( VkImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageBlit2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setSrcSubresource( ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      srcSubresource = srcSubresource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setSrcOffsets( std::array<Offset3D, 2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      srcOffsets = srcOffsets_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setDstSubresource( ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      dstSubresource = dstSubresource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setDstOffsets( std::array<Offset3D, 2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      dstOffsets = dstOffsets_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference / pointer conversions to the native type, for passing directly to the C API.
    operator VkImageBlit2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageBlit2 *>( this );
    }

    operator VkImageBlit2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageBlit2 *>( this );
    }

    operator VkImageBlit2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageBlit2 *>( this );
    }

    operator VkImageBlit2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageBlit2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of const references, used below for reflection-based comparison.
    std::tuple<StructureType const &,
               const void * const &,
               ImageSubresourceLayers const &,
               ArrayWrapper1D<Offset3D, 2> const &,
               ImageSubresourceLayers const &,
               ArrayWrapper1D<Offset3D, 2> const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcSubresource, srcOffsets, dstSubresource, dstOffsets );
    }
#endif

    // Defaulted three-way comparison when available; otherwise hand-rolled memberwise == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageBlit2 const & ) const = default;
#else
    bool operator==( ImageBlit2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && ( srcOffsets == rhs.srcOffsets ) &&
             ( dstSubresource == rhs.dstSubresource ) && ( dstOffsets == rhs.dstOffsets );
#  endif
    }

    bool operator!=( ImageBlit2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members match VkImageBlit2 in declaration order; sType is fixed for this type.
    StructureType               sType          = StructureType::eImageBlit2;
    const void *                pNext          = {};
    ImageSubresourceLayers      srcSubresource = {};
    ArrayWrapper1D<Offset3D, 2> srcOffsets     = {};
    ImageSubresourceLayers      dstSubresource = {};
    ArrayWrapper1D<Offset3D, 2> dstOffsets     = {};
  };

  // Map the native C type back to this wrapper (C++20 and later).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkImageBlit2>
  {
    using Type = ImageBlit2;
  };
#endif

  // Map the sType enumerant back to this wrapper.
  template <>
  struct CppType<StructureType, StructureType::eImageBlit2>
  {
    using Type = ImageBlit2;
  };

  // Backward-compatible alias: the struct was promoted to core from the KHR extension.
  using ImageBlit2KHR = ImageBlit2;

  // wrapper struct for struct VkBlitImageInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBlitImageInfo2.html
  //
  // C++ wrapper whose members mirror the native struct one-to-one; the reinterpret_cast-based
  // conversions below rely on this layout match to bridge to the C API at zero cost.
  // NOTE: pRegions is a non-owning pointer; the caller must keep the region array alive
  // for as long as this struct is in use.
  struct BlitImageInfo2
  {
    using NativeType = VkBlitImageInfo2;

    // allowDuplicate: this structure may appear at most once in a pNext chain.
    // structureType: the VkStructureType value carried in sType.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBlitImageInfo2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor taking an explicit regionCount_/pRegions_ pair; pNext_ comes last.
    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2( Image              srcImage_       = {},
                                            ImageLayout        srcImageLayout_ = ImageLayout::eUndefined,
                                            Image              dstImage_       = {},
                                            ImageLayout        dstImageLayout_ = ImageLayout::eUndefined,
                                            uint32_t           regionCount_    = {},
                                            const ImageBlit2 * pRegions_       = {},
                                            Filter             filter_         = Filter::eNearest,
                                            const void *       pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcImage{ srcImage_ }
      , srcImageLayout{ srcImageLayout_ }
      , dstImage{ dstImage_ }
      , dstImageLayout{ dstImageLayout_ }
      , regionCount{ regionCount_ }
      , pRegions{ pRegions_ }
      , filter{ filter_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2( BlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native struct by reinterpreting it as the layout-identical wrapper.
    BlitImageInfo2( VkBlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : BlitImageInfo2( *reinterpret_cast<BlitImageInfo2 const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode convenience constructor: derives regionCount/pRegions from an ArrayProxy.
    // Stores a pointer into the proxy's underlying storage, so that storage must outlive this struct
    // (ArrayProxyNoTemporaries rejects temporaries at compile time).
    BlitImageInfo2( Image                                             srcImage_,
                    ImageLayout                                       srcImageLayout_,
                    Image                                             dstImage_,
                    ImageLayout                                       dstImageLayout_,
                    ArrayProxyNoTemporaries<const ImageBlit2> const & regions_,
                    Filter                                            filter_ = Filter::eNearest,
                    const void *                                      pNext_  = nullptr )
      : pNext( pNext_ )
      , srcImage( srcImage_ )
      , srcImageLayout( srcImageLayout_ )
      , dstImage( dstImage_ )
      , dstImageLayout( dstImageLayout_ )
      , regionCount( static_cast<uint32_t>( regions_.size() ) )
      , pRegions( regions_.data() )
      , filter( filter_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    BlitImageInfo2 & operator=( BlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native struct (same reinterpret_cast bridge as the converting constructor).
    BlitImageInfo2 & operator=( VkBlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BlitImageInfo2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setSrcImage( Image srcImage_ ) VULKAN_HPP_NOEXCEPT
    {
      srcImage = srcImage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setSrcImageLayout( ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      srcImageLayout = srcImageLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setDstImage( Image dstImage_ ) VULKAN_HPP_NOEXCEPT
    {
      dstImage = dstImage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setDstImageLayout( ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      dstImageLayout = dstImageLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = regionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setPRegions( const ImageBlit2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      pRegions = pRegions_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets regionCount and pRegions together from an ArrayProxy
    // (same lifetime caveat as the ArrayProxy constructor above).
    BlitImageInfo2 & setRegions( ArrayProxyNoTemporaries<const ImageBlit2> const & regions_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = static_cast<uint32_t>( regions_.size() );
      pRegions    = regions_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setFilter( Filter filter_ ) VULKAN_HPP_NOEXCEPT
    {
      filter = filter_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference / pointer conversions to the native type, for passing directly to the C API.
    operator VkBlitImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBlitImageInfo2 *>( this );
    }

    operator VkBlitImageInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBlitImageInfo2 *>( this );
    }

    operator VkBlitImageInfo2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBlitImageInfo2 *>( this );
    }

    operator VkBlitImageInfo2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBlitImageInfo2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of const references, used below for reflection-based comparison.
    std::tuple<StructureType const &,
               const void * const &,
               Image const &,
               ImageLayout const &,
               Image const &,
               ImageLayout const &,
               uint32_t const &,
               const ImageBlit2 * const &,
               Filter const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter );
    }
#endif

    // Defaulted three-way comparison when available; otherwise hand-rolled memberwise == / !=.
    // Note: pRegions is compared as a pointer, not by the pointed-to regions.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BlitImageInfo2 const & ) const = default;
#else
    bool operator==( BlitImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) &&
             ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ) &&
             ( filter == rhs.filter );
#  endif
    }

    bool operator!=( BlitImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members match VkBlitImageInfo2 in declaration order; sType is fixed for this type.
    StructureType      sType          = StructureType::eBlitImageInfo2;
    const void *       pNext          = {};
    Image              srcImage       = {};
    ImageLayout        srcImageLayout = ImageLayout::eUndefined;
    Image              dstImage       = {};
    ImageLayout        dstImageLayout = ImageLayout::eUndefined;
    uint32_t           regionCount    = {};
    const ImageBlit2 * pRegions       = {};
    Filter             filter         = Filter::eNearest;
  };

  // Map the native C type back to this wrapper (C++20 and later).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkBlitImageInfo2>
  {
    using Type = BlitImageInfo2;
  };
#endif

  // Map the sType enumerant back to this wrapper.
  template <>
  struct CppType<StructureType, StructureType::eBlitImageInfo2>
  {
    using Type = BlitImageInfo2;
  };

  // Backward-compatible alias: the struct was promoted to core from the KHR extension.
  using BlitImageInfo2KHR = BlitImageInfo2;

  // wrapper struct for struct VkBufferCaptureDescriptorDataInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCaptureDescriptorDataInfoEXT.html
  //
  // C++ wrapper whose members mirror the native struct one-to-one; the reinterpret_cast-based
  // conversions below rely on this layout match to bridge to the C API at zero cost.
  struct BufferCaptureDescriptorDataInfoEXT
  {
    using NativeType = VkBufferCaptureDescriptorDataInfoEXT;

    // allowDuplicate: this structure may appear at most once in a pNext chain.
    // structureType: the VkStructureType value carried in sType.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBufferCaptureDescriptorDataInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; pNext_ comes last so the payload field can be passed positionally.
    VULKAN_HPP_CONSTEXPR BufferCaptureDescriptorDataInfoEXT( Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , buffer{ buffer_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BufferCaptureDescriptorDataInfoEXT( BufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native struct by reinterpreting it as the layout-identical wrapper.
    BufferCaptureDescriptorDataInfoEXT( VkBufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : BufferCaptureDescriptorDataInfoEXT( *reinterpret_cast<BufferCaptureDescriptorDataInfoEXT const *>( &rhs ) )
    {
    }

    BufferCaptureDescriptorDataInfoEXT & operator=( BufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native struct (same reinterpret_cast bridge as the converting constructor).
    BufferCaptureDescriptorDataInfoEXT & operator=( VkBufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BufferCaptureDescriptorDataInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 BufferCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCaptureDescriptorDataInfoEXT & setBuffer( Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference / pointer conversions to the native type, for passing directly to the C API.
    operator VkBufferCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( this );
    }

    operator VkBufferCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferCaptureDescriptorDataInfoEXT *>( this );
    }

    operator VkBufferCaptureDescriptorDataInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( this );
    }

    operator VkBufferCaptureDescriptorDataInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBufferCaptureDescriptorDataInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of const references, used below for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, Buffer const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, buffer );
    }
#endif

    // Defaulted three-way comparison when available; otherwise hand-rolled memberwise == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferCaptureDescriptorDataInfoEXT const & ) const = default;
#else
    bool operator==( BufferCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer );
#  endif
    }

    bool operator!=( BufferCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members match VkBufferCaptureDescriptorDataInfoEXT in declaration order; sType is fixed for this type.
    StructureType sType  = StructureType::eBufferCaptureDescriptorDataInfoEXT;
    const void *  pNext  = {};
    Buffer        buffer = {};
  };

  // Map the native C type back to this wrapper (C++20 and later).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkBufferCaptureDescriptorDataInfoEXT>
  {
    using Type = BufferCaptureDescriptorDataInfoEXT;
  };
#endif

  // Map the sType enumerant back to this wrapper.
  template <>
  struct CppType<StructureType, StructureType::eBufferCaptureDescriptorDataInfoEXT>
  {
    using Type = BufferCaptureDescriptorDataInfoEXT;
  };

#if defined( VK_USE_PLATFORM_FUCHSIA )
  // wrapper struct for struct VkBufferCollectionBufferCreateInfoFUCHSIA, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCollectionBufferCreateInfoFUCHSIA.html
  //
  // Fuchsia-only (guarded by VK_USE_PLATFORM_FUCHSIA). C++ wrapper whose members mirror the
  // native struct one-to-one; the reinterpret_cast-based conversions below rely on this layout
  // match to bridge to the C API at zero cost.
  struct BufferCollectionBufferCreateInfoFUCHSIA
  {
    using NativeType = VkBufferCollectionBufferCreateInfoFUCHSIA;

    // allowDuplicate: this structure may appear at most once in a pNext chain.
    // structureType: the VkStructureType value carried in sType.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBufferCollectionBufferCreateInfoFUCHSIA;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; pNext_ comes last so the payload fields can be passed positionally.
    VULKAN_HPP_CONSTEXPR BufferCollectionBufferCreateInfoFUCHSIA( BufferCollectionFUCHSIA collection_ = {},
                                                                  uint32_t                index_      = {},
                                                                  const void *            pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , collection{ collection_ }
      , index{ index_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BufferCollectionBufferCreateInfoFUCHSIA( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native struct by reinterpreting it as the layout-identical wrapper.
    BufferCollectionBufferCreateInfoFUCHSIA( VkBufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
      : BufferCollectionBufferCreateInfoFUCHSIA( *reinterpret_cast<BufferCollectionBufferCreateInfoFUCHSIA const *>( &rhs ) )
    {
    }

    BufferCollectionBufferCreateInfoFUCHSIA & operator=( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native struct (same reinterpret_cast bridge as the converting constructor).
    BufferCollectionBufferCreateInfoFUCHSIA & operator=( VkBufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BufferCollectionBufferCreateInfoFUCHSIA const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & setCollection( BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT
    {
      collection = collection_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT
    {
      index = index_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference / pointer conversions to the native type, for passing directly to the C API.
    operator VkBufferCollectionBufferCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferCollectionBufferCreateInfoFUCHSIA *>( this );
    }

    operator VkBufferCollectionBufferCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferCollectionBufferCreateInfoFUCHSIA *>( this );
    }

    operator VkBufferCollectionBufferCreateInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBufferCollectionBufferCreateInfoFUCHSIA *>( this );
    }

    operator VkBufferCollectionBufferCreateInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBufferCollectionBufferCreateInfoFUCHSIA *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of const references, used below for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, BufferCollectionFUCHSIA const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, collection, index );
    }
#  endif

    // Defaulted three-way comparison when available; otherwise hand-rolled memberwise == / !=.
#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferCollectionBufferCreateInfoFUCHSIA const & ) const = default;
#  else
    bool operator==( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( collection == rhs.collection ) && ( index == rhs.index );
#    endif
    }

    bool operator!=( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members match VkBufferCollectionBufferCreateInfoFUCHSIA in declaration order; sType is fixed for this type.
    StructureType           sType      = StructureType::eBufferCollectionBufferCreateInfoFUCHSIA;
    const void *            pNext      = {};
    BufferCollectionFUCHSIA collection = {};
    uint32_t                index      = {};
  };

  // Map the native C type back to this wrapper (C++20 and later).
#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkBufferCollectionBufferCreateInfoFUCHSIA>
  {
    using Type = BufferCollectionBufferCreateInfoFUCHSIA;
  };
#  endif

  // Map the sType enumerant back to this wrapper.
  template <>
  struct CppType<StructureType, StructureType::eBufferCollectionBufferCreateInfoFUCHSIA>
  {
    using Type = BufferCollectionBufferCreateInfoFUCHSIA;
  };
#endif /*VK_USE_PLATFORM_FUCHSIA*/

#if defined( VK_USE_PLATFORM_FUCHSIA )
  // wrapper struct for struct VkBufferCollectionConstraintsInfoFUCHSIA, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCollectionConstraintsInfoFUCHSIA.html
  struct BufferCollectionConstraintsInfoFUCHSIA
  {
    using NativeType = VkBufferCollectionConstraintsInfoFUCHSIA;

    // allowDuplicate = false: presumably this sType may appear at most once in a pNext chain (project convention) — confirm against the chain-validation machinery.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBufferCollectionConstraintsInfoFUCHSIA;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all counts default to 0 and pNext to nullptr.
    // sType is fixed by its member initializer and is intentionally not a parameter.
    VULKAN_HPP_CONSTEXPR BufferCollectionConstraintsInfoFUCHSIA( uint32_t     minBufferCount_                  = {},
                                                                 uint32_t     maxBufferCount_                  = {},
                                                                 uint32_t     minBufferCountForCamping_        = {},
                                                                 uint32_t     minBufferCountForDedicatedSlack_ = {},
                                                                 uint32_t     minBufferCountForSharedSlack_    = {},
                                                                 const void * pNext_                           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , minBufferCount{ minBufferCount_ }
      , maxBufferCount{ maxBufferCount_ }
      , minBufferCountForCamping{ minBufferCountForCamping_ }
      , minBufferCountForDedicatedSlack{ minBufferCountForDedicatedSlack_ }
      , minBufferCountForSharedSlack{ minBufferCountForSharedSlack_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BufferCollectionConstraintsInfoFUCHSIA( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-identical to VkBufferCollectionConstraintsInfoFUCHSIA.
    BufferCollectionConstraintsInfoFUCHSIA( VkBufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
      : BufferCollectionConstraintsInfoFUCHSIA( *reinterpret_cast<BufferCollectionConstraintsInfoFUCHSIA const *>( &rhs ) )
    {
    }

    BufferCollectionConstraintsInfoFUCHSIA & operator=( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    BufferCollectionConstraintsInfoFUCHSIA & operator=( VkBufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BufferCollectionConstraintsInfoFUCHSIA const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (builder-style) setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCount( uint32_t minBufferCount_ ) VULKAN_HPP_NOEXCEPT
    {
      minBufferCount = minBufferCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMaxBufferCount( uint32_t maxBufferCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxBufferCount = maxBufferCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCountForCamping( uint32_t minBufferCountForCamping_ ) VULKAN_HPP_NOEXCEPT
    {
      minBufferCountForCamping = minBufferCountForCamping_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA &
      setMinBufferCountForDedicatedSlack( uint32_t minBufferCountForDedicatedSlack_ ) VULKAN_HPP_NOEXCEPT
    {
      minBufferCountForDedicatedSlack = minBufferCountForDedicatedSlack_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA &
      setMinBufferCountForSharedSlack( uint32_t minBufferCountForSharedSlack_ ) VULKAN_HPP_NOEXCEPT
    {
      minBufferCountForSharedSlack = minBufferCountForSharedSlack_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to the native Vulkan type; valid because the wrapper adds no data members.
    operator VkBufferCollectionConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferCollectionConstraintsInfoFUCHSIA *>( this );
    }

    operator VkBufferCollectionConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferCollectionConstraintsInfoFUCHSIA *>( this );
    }

    operator VkBufferCollectionConstraintsInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBufferCollectionConstraintsInfoFUCHSIA *>( this );
    }

    operator VkBufferCollectionConstraintsInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBufferCollectionConstraintsInfoFUCHSIA *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used to implement comparison below.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, minBufferCount, maxBufferCount, minBufferCountForCamping, minBufferCountForDedicatedSlack, minBufferCountForSharedSlack );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferCollectionConstraintsInfoFUCHSIA const & ) const = default;
#  else
    // Memberwise equality; note pNext is compared as a raw pointer (shallow, not deep).
    bool operator==( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minBufferCount == rhs.minBufferCount ) && ( maxBufferCount == rhs.maxBufferCount ) &&
             ( minBufferCountForCamping == rhs.minBufferCountForCamping ) && ( minBufferCountForDedicatedSlack == rhs.minBufferCountForDedicatedSlack ) &&
             ( minBufferCountForSharedSlack == rhs.minBufferCountForSharedSlack );
#    endif
    }

    bool operator!=( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Member order and types mirror VkBufferCollectionConstraintsInfoFUCHSIA exactly; do not reorder
    // (the reinterpret_cast conversions above depend on identical layout).
    StructureType sType                           = StructureType::eBufferCollectionConstraintsInfoFUCHSIA;
    const void *  pNext                           = {};
    uint32_t      minBufferCount                  = {};
    uint32_t      maxBufferCount                  = {};
    uint32_t      minBufferCountForCamping        = {};
    uint32_t      minBufferCountForDedicatedSlack = {};
    uint32_t      minBufferCountForSharedSlack    = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Compile-time mapping from the native C struct to its C++ wrapper type (used for template-based type lookup).
  template <>
  struct CppType<VkBufferCollectionConstraintsInfoFUCHSIA>
  {
    using Type = BufferCollectionConstraintsInfoFUCHSIA;
  };
#  endif

  // Compile-time mapping from the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eBufferCollectionConstraintsInfoFUCHSIA>
  {
    using Type = BufferCollectionConstraintsInfoFUCHSIA;
  };
#endif /*VK_USE_PLATFORM_FUCHSIA*/

#if defined( VK_USE_PLATFORM_FUCHSIA )
  // wrapper struct for struct VkBufferCollectionCreateInfoFUCHSIA, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCollectionCreateInfoFUCHSIA.html
  struct BufferCollectionCreateInfoFUCHSIA
  {
    using NativeType = VkBufferCollectionCreateInfoFUCHSIA;

    // allowDuplicate = false: presumably this sType may appear at most once in a pNext chain (project convention).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBufferCollectionCreateInfoFUCHSIA;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and is not a parameter.
    VULKAN_HPP_CONSTEXPR BufferCollectionCreateInfoFUCHSIA( zx_handle_t collectionToken_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , collectionToken{ collectionToken_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BufferCollectionCreateInfoFUCHSIA( BufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-identical to VkBufferCollectionCreateInfoFUCHSIA.
    BufferCollectionCreateInfoFUCHSIA( VkBufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
      : BufferCollectionCreateInfoFUCHSIA( *reinterpret_cast<BufferCollectionCreateInfoFUCHSIA const *>( &rhs ) )
    {
    }

    BufferCollectionCreateInfoFUCHSIA & operator=( BufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as above).
    BufferCollectionCreateInfoFUCHSIA & operator=( VkBufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BufferCollectionCreateInfoFUCHSIA const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (builder-style) setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 BufferCollectionCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCollectionCreateInfoFUCHSIA & setCollectionToken( zx_handle_t collectionToken_ ) VULKAN_HPP_NOEXCEPT
    {
      collectionToken = collectionToken_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to the native Vulkan type; valid because the wrapper adds no data members.
    operator VkBufferCollectionCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( this );
    }

    operator VkBufferCollectionCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferCollectionCreateInfoFUCHSIA *>( this );
    }

    operator VkBufferCollectionCreateInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( this );
    }

    operator VkBufferCollectionCreateInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBufferCollectionCreateInfoFUCHSIA *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members.
    std::tuple<StructureType const &, const void * const &, zx_handle_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, collectionToken );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written ordering (not defaulted): collectionToken is compared bytewise via memcmp,
    // presumably because zx_handle_t is a platform-external type — confirm against the generator's handling of external types.
    std::strong_ordering operator<=>( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = memcmp( &collectionToken, &rhs.collectionToken, sizeof( zx_handle_t ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#  endif

    // operator== is defined unconditionally: a user-provided (non-defaulted) operator<=> does not
    // implicitly generate equality, so it cannot live in an #else branch like in the sibling structs.
    bool operator==( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &collectionToken, &rhs.collectionToken, sizeof( zx_handle_t ) ) == 0 );
    }

    bool operator!=( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Member order and types mirror VkBufferCollectionCreateInfoFUCHSIA exactly; do not reorder
    // (the reinterpret_cast conversions above depend on identical layout).
    StructureType sType           = StructureType::eBufferCollectionCreateInfoFUCHSIA;
    const void *  pNext           = {};
    zx_handle_t   collectionToken = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Compile-time mapping from the native C struct to its C++ wrapper type (used for template-based type lookup).
  template <>
  struct CppType<VkBufferCollectionCreateInfoFUCHSIA>
  {
    using Type = BufferCollectionCreateInfoFUCHSIA;
  };
#  endif

  // Compile-time mapping from the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eBufferCollectionCreateInfoFUCHSIA>
  {
    using Type = BufferCollectionCreateInfoFUCHSIA;
  };
#endif /*VK_USE_PLATFORM_FUCHSIA*/

#if defined( VK_USE_PLATFORM_FUCHSIA )
  // wrapper struct for struct VkBufferCollectionImageCreateInfoFUCHSIA, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCollectionImageCreateInfoFUCHSIA.html
  struct BufferCollectionImageCreateInfoFUCHSIA
  {
    using NativeType = VkBufferCollectionImageCreateInfoFUCHSIA;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBufferCollectionImageCreateInfoFUCHSIA;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BufferCollectionImageCreateInfoFUCHSIA( BufferCollectionFUCHSIA collection_ = {},
                                                                 uint32_t                index_      = {},
                                                                 const void *            pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , collection{ collection_ }
      , index{ index_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BufferCollectionImageCreateInfoFUCHSIA( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferCollectionImageCreateInfoFUCHSIA( VkBufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
      : BufferCollectionImageCreateInfoFUCHSIA( *reinterpret_cast<BufferCollectionImageCreateInfoFUCHSIA const *>( &rhs ) )
    {
    }

    BufferCollectionImageCreateInfoFUCHSIA & operator=( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BufferCollectionImageCreateInfoFUCHSIA & operator=( VkBufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BufferCollectionImageCreateInfoFUCHSIA const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & setCollection( BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT
    {
      collection = collection_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT
    {
      index = index_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkBufferCollectionImageCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferCollectionImageCreateInfoFUCHSIA *>( this );
    }

    operator VkBufferCollectionImageCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferCollectionImageCreateInfoFUCHSIA *>( this );
    }

    operator VkBufferCollectionImageCreateInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBufferCollectionImageCreateInfoFUCHSIA *>( this );
    }

    operator VkBufferCollectionImageCreateInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBufferCollectionImageCreateInfoFUCHSIA *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, BufferCollectionFUCHSIA const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, collection, index );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferCollectionImageCreateInfoFUCHSIA const & ) const = default;
#  else
    bool operator==( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( collection == rhs.collection ) && ( index == rhs.index );
#    endif
    }

    bool operator!=( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    StructureType           sType      = StructureType::eBufferCollectionImageCreateInfoFUCHSIA;
    const void *            pNext      = {};
    BufferCollectionFUCHSIA collection = {};
    uint32_t                index      = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Compile-time mapping from the native C struct to its C++ wrapper type (used for template-based type lookup).
  template <>
  struct CppType<VkBufferCollectionImageCreateInfoFUCHSIA>
  {
    using Type = BufferCollectionImageCreateInfoFUCHSIA;
  };
#  endif

  // Compile-time mapping from the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eBufferCollectionImageCreateInfoFUCHSIA>
  {
    using Type = BufferCollectionImageCreateInfoFUCHSIA;
  };
#endif /*VK_USE_PLATFORM_FUCHSIA*/

#if defined( VK_USE_PLATFORM_FUCHSIA )
  // wrapper struct for struct VkSysmemColorSpaceFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSysmemColorSpaceFUCHSIA.html
  struct SysmemColorSpaceFUCHSIA
  {
    using NativeType = VkSysmemColorSpaceFUCHSIA;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSysmemColorSpaceFUCHSIA;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and is not a parameter.
    VULKAN_HPP_CONSTEXPR SysmemColorSpaceFUCHSIA( uint32_t colorSpace_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , colorSpace( colorSpace_ )
    {
    }

    VULKAN_HPP_CONSTEXPR SysmemColorSpaceFUCHSIA( SysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by viewing it as the layout-identical wrapper.
    SysmemColorSpaceFUCHSIA( VkSysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
      : SysmemColorSpaceFUCHSIA( *reinterpret_cast<SysmemColorSpaceFUCHSIA const *>( &rhs ) )
    {
    }

    SysmemColorSpaceFUCHSIA & operator=( SysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct.
    SysmemColorSpaceFUCHSIA & operator=( VkSysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SysmemColorSpaceFUCHSIA const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters.
    VULKAN_HPP_CONSTEXPR_14 SysmemColorSpaceFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SysmemColorSpaceFUCHSIA & setColorSpace( uint32_t colorSpace_ ) VULKAN_HPP_NOEXCEPT
    {
      this->colorSpace = colorSpace_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to the native Vulkan type (the wrapper adds no data members).
    operator VkSysmemColorSpaceFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSysmemColorSpaceFUCHSIA const *>( this );
    }

    operator VkSysmemColorSpaceFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSysmemColorSpaceFUCHSIA const *>( this );
    }

    operator VkSysmemColorSpaceFUCHSIA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSysmemColorSpaceFUCHSIA *>( this );
    }

    operator VkSysmemColorSpaceFUCHSIA *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSysmemColorSpaceFUCHSIA *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of references.
    std::tuple<StructureType const &, const void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, colorSpace );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SysmemColorSpaceFUCHSIA const & ) const = default;
#  else
    // Shallow memberwise equality (pNext compared as a raw pointer).
    bool operator==( SysmemColorSpaceFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( colorSpace == rhs.colorSpace ) && ( pNext == rhs.pNext ) && ( sType == rhs.sType );
#    endif
    }

    bool operator!=( SysmemColorSpaceFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#  endif

  public:
    // Layout mirrors VkSysmemColorSpaceFUCHSIA; member order must not change.
    StructureType sType      = StructureType::eSysmemColorSpaceFUCHSIA;
    const void *  pNext      = {};
    uint32_t      colorSpace = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Compile-time mapping from the native C struct to its C++ wrapper type (used for template-based type lookup).
  template <>
  struct CppType<VkSysmemColorSpaceFUCHSIA>
  {
    using Type = SysmemColorSpaceFUCHSIA;
  };
#  endif

  // Compile-time mapping from the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eSysmemColorSpaceFUCHSIA>
  {
    using Type = SysmemColorSpaceFUCHSIA;
  };
#endif /*VK_USE_PLATFORM_FUCHSIA*/

#if defined( VK_USE_PLATFORM_FUCHSIA )
  // wrapper struct for struct VkBufferCollectionPropertiesFUCHSIA, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCollectionPropertiesFUCHSIA.html
  struct BufferCollectionPropertiesFUCHSIA
  {
    using NativeType = VkBufferCollectionPropertiesFUCHSIA;

    // allowDuplicate = false: presumably this sType may appear at most once in a pNext chain (project convention).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBufferCollectionPropertiesFUCHSIA;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; note pNext is a non-const void * and this struct has no setter section —
    // it looks like an output structure filled by a query call (TODO confirm against the API entry point that consumes it).
    VULKAN_HPP_CONSTEXPR BufferCollectionPropertiesFUCHSIA( uint32_t                    memoryTypeBits_                   = {},
                                                            uint32_t                    bufferCount_                      = {},
                                                            uint32_t                    createInfoIndex_                  = {},
                                                            uint64_t                    sysmemPixelFormat_                = {},
                                                            FormatFeatureFlags          formatFeatures_                   = {},
                                                            SysmemColorSpaceFUCHSIA     sysmemColorSpaceIndex_            = {},
                                                            ComponentMapping            samplerYcbcrConversionComponents_ = {},
                                                            SamplerYcbcrModelConversion suggestedYcbcrModel_    = SamplerYcbcrModelConversion::eRgbIdentity,
                                                            SamplerYcbcrRange           suggestedYcbcrRange_    = SamplerYcbcrRange::eItuFull,
                                                            ChromaLocation              suggestedXChromaOffset_ = ChromaLocation::eCositedEven,
                                                            ChromaLocation              suggestedYChromaOffset_ = ChromaLocation::eCositedEven,
                                                            void *                      pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , memoryTypeBits{ memoryTypeBits_ }
      , bufferCount{ bufferCount_ }
      , createInfoIndex{ createInfoIndex_ }
      , sysmemPixelFormat{ sysmemPixelFormat_ }
      , formatFeatures{ formatFeatures_ }
      , sysmemColorSpaceIndex{ sysmemColorSpaceIndex_ }
      , samplerYcbcrConversionComponents{ samplerYcbcrConversionComponents_ }
      , suggestedYcbcrModel{ suggestedYcbcrModel_ }
      , suggestedYcbcrRange{ suggestedYcbcrRange_ }
      , suggestedXChromaOffset{ suggestedXChromaOffset_ }
      , suggestedYChromaOffset{ suggestedYChromaOffset_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BufferCollectionPropertiesFUCHSIA( BufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-identical to VkBufferCollectionPropertiesFUCHSIA.
    BufferCollectionPropertiesFUCHSIA( VkBufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
      : BufferCollectionPropertiesFUCHSIA( *reinterpret_cast<BufferCollectionPropertiesFUCHSIA const *>( &rhs ) )
    {
    }

    BufferCollectionPropertiesFUCHSIA & operator=( BufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as above).
    BufferCollectionPropertiesFUCHSIA & operator=( VkBufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BufferCollectionPropertiesFUCHSIA const *>( &rhs );
      return *this;
    }

    // Reinterpreting conversions to the native Vulkan type; valid because the wrapper adds no data members.
    operator VkBufferCollectionPropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferCollectionPropertiesFUCHSIA *>( this );
    }

    operator VkBufferCollectionPropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( this );
    }

    operator VkBufferCollectionPropertiesFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBufferCollectionPropertiesFUCHSIA *>( this );
    }

    operator VkBufferCollectionPropertiesFUCHSIA *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used to implement comparison below.
    std::tuple<StructureType const &,
               void * const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint64_t const &,
               FormatFeatureFlags const &,
               SysmemColorSpaceFUCHSIA const &,
               ComponentMapping const &,
               SamplerYcbcrModelConversion const &,
               SamplerYcbcrRange const &,
               ChromaLocation const &,
               ChromaLocation const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       memoryTypeBits,
                       bufferCount,
                       createInfoIndex,
                       sysmemPixelFormat,
                       formatFeatures,
                       sysmemColorSpaceIndex,
                       samplerYcbcrConversionComponents,
                       suggestedYcbcrModel,
                       suggestedYcbcrRange,
                       suggestedXChromaOffset,
                       suggestedYChromaOffset );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferCollectionPropertiesFUCHSIA const & ) const = default;
#  else
    // Memberwise equality; pNext is compared as a raw pointer (shallow, not deep).
    bool operator==( BufferCollectionPropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ) && ( bufferCount == rhs.bufferCount ) &&
             ( createInfoIndex == rhs.createInfoIndex ) && ( sysmemPixelFormat == rhs.sysmemPixelFormat ) && ( formatFeatures == rhs.formatFeatures ) &&
             ( sysmemColorSpaceIndex == rhs.sysmemColorSpaceIndex ) && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) &&
             ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) &&
             ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
#    endif
    }

    bool operator!=( BufferCollectionPropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Member order and types mirror VkBufferCollectionPropertiesFUCHSIA exactly; do not reorder
    // (the reinterpret_cast conversions above depend on identical layout). Constructor init order matches.
    StructureType               sType                            = StructureType::eBufferCollectionPropertiesFUCHSIA;
    void *                      pNext                            = {};
    uint32_t                    memoryTypeBits                   = {};
    uint32_t                    bufferCount                      = {};
    uint32_t                    createInfoIndex                  = {};
    uint64_t                    sysmemPixelFormat                = {};
    FormatFeatureFlags          formatFeatures                   = {};
    SysmemColorSpaceFUCHSIA     sysmemColorSpaceIndex            = {};
    ComponentMapping            samplerYcbcrConversionComponents = {};
    SamplerYcbcrModelConversion suggestedYcbcrModel              = SamplerYcbcrModelConversion::eRgbIdentity;
    SamplerYcbcrRange           suggestedYcbcrRange              = SamplerYcbcrRange::eItuFull;
    ChromaLocation              suggestedXChromaOffset           = ChromaLocation::eCositedEven;
    ChromaLocation              suggestedYChromaOffset           = ChromaLocation::eCositedEven;
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Compile-time mapping from the native C struct to its C++ wrapper type (used for template-based type lookup).
  template <>
  struct CppType<VkBufferCollectionPropertiesFUCHSIA>
  {
    using Type = BufferCollectionPropertiesFUCHSIA;
  };
#  endif

  // Compile-time mapping from the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eBufferCollectionPropertiesFUCHSIA>
  {
    using Type = BufferCollectionPropertiesFUCHSIA;
  };
#endif /*VK_USE_PLATFORM_FUCHSIA*/

  // wrapper struct for struct VkBufferCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCreateInfo.html
  struct BufferCreateInfo
  {
    using NativeType = VkBufferCreateInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBufferCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BufferCreateInfo( BufferCreateFlags flags_                 = {},
                                           DeviceSize        size_                  = {},
                                           BufferUsageFlags  usage_                 = {},
                                           SharingMode       sharingMode_           = SharingMode::eExclusive,
                                           uint32_t          queueFamilyIndexCount_ = {},
                                           const uint32_t *  pQueueFamilyIndices_   = {},
                                           const void *      pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , size{ size_ }
      , usage{ usage_ }
      , sharingMode{ sharingMode_ }
      , queueFamilyIndexCount{ queueFamilyIndexCount_ }
      , pQueueFamilyIndices{ pQueueFamilyIndices_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BufferCreateInfo( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferCreateInfo( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCreateInfo( *reinterpret_cast<BufferCreateInfo const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BufferCreateInfo( BufferCreateFlags                               flags_,
                      DeviceSize                                      size_,
                      BufferUsageFlags                                usage_,
                      SharingMode                                     sharingMode_,
                      ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_,
                      const void *                                    pNext_ = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , size( size_ )
      , usage( usage_ )
      , sharingMode( sharingMode_ )
      , queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) )
      , pQueueFamilyIndices( queueFamilyIndices_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    BufferCreateInfo & operator=( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BufferCreateInfo & operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BufferCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent per-member setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setFlags( BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setSize( DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setUsage( BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setSharingMode( SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
    {
      sharingMode = sharingMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndexCount = queueFamilyIndexCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pQueueFamilyIndices = pQueueFamilyIndices_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: updates both the count and the pointer from a single array proxy.
    // Caller must keep the referenced array alive while this struct is in use.
    BufferCreateInfo & setQueueFamilyIndices( ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
      pQueueFamilyIndices   = queueFamilyIndices_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C type (reference and pointer, const and non-const),
    // so the wrapper can be passed directly to the C Vulkan API.
    operator VkBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferCreateInfo *>( this );
    }

    operator VkBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferCreateInfo *>( this );
    }

    operator VkBufferCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBufferCreateInfo *>( this );
    }

    operator VkBufferCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBufferCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic (reflection-style) code.
    std::tuple<StructureType const &,
               const void * const &,
               BufferCreateFlags const &,
               DeviceSize const &,
               BufferUsageFlags const &,
               SharingMode const &,
               uint32_t const &,
               const uint32_t * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, size, usage, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferCreateInfo const & ) const = default;
#else
    // Memberwise equality; note that pNext and pQueueFamilyIndices are compared as pointers, not deeply.
    bool operator==( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( size == rhs.size ) && ( usage == rhs.usage ) &&
             ( sharingMode == rhs.sharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
#  endif
    }

    bool operator!=( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkBufferCreateInfo one-to-one; sType must remain eBufferCreateInfo.
    StructureType     sType                 = StructureType::eBufferCreateInfo;
    const void *      pNext                 = {};
    BufferCreateFlags flags                 = {};
    DeviceSize        size                  = {};
    BufferUsageFlags  usage                 = {};
    SharingMode       sharingMode           = SharingMode::eExclusive;
    uint32_t          queueFamilyIndexCount = {};
    const uint32_t *  pQueueFamilyIndices   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for generic code.
  template <>
  struct CppType<VkBufferCreateInfo>
  {
    using Type = BufferCreateInfo;
  };
#endif

  // Maps the sType enumerant to the wrapper type (used e.g. by structure-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::eBufferCreateInfo>
  {
    using Type = BufferCreateInfo;
  };

#if defined( VK_USE_PLATFORM_FUCHSIA )
  // wrapper struct for struct VkBufferConstraintsInfoFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferConstraintsInfoFUCHSIA.html
  struct BufferConstraintsInfoFUCHSIA
  {
    using NativeType = VkBufferConstraintsInfoFUCHSIA;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBufferConstraintsInfoFUCHSIA;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BufferConstraintsInfoFUCHSIA( BufferCreateInfo                       createInfo_                  = {},
                                                       FormatFeatureFlags                     requiredFormatFeatures_      = {},
                                                       BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {},
                                                       const void *                           pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , createInfo{ createInfo_ }
      , requiredFormatFeatures{ requiredFormatFeatures_ }
      , bufferCollectionConstraints{ bufferCollectionConstraints_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BufferConstraintsInfoFUCHSIA( BufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Implicit conversion from the native C struct; layout-compatible, so a reinterpret_cast suffices.
    BufferConstraintsInfoFUCHSIA( VkBufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
      : BufferConstraintsInfoFUCHSIA( *reinterpret_cast<BufferConstraintsInfoFUCHSIA const *>( &rhs ) )
    {
    }

    BufferConstraintsInfoFUCHSIA & operator=( BufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BufferConstraintsInfoFUCHSIA & operator=( VkBufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BufferConstraintsInfoFUCHSIA const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent per-member setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setCreateInfo( BufferCreateInfo const & createInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      createInfo = createInfo_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setRequiredFormatFeatures( FormatFeatureFlags requiredFormatFeatures_ ) VULKAN_HPP_NOEXCEPT
    {
      requiredFormatFeatures = requiredFormatFeatures_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA &
      setBufferCollectionConstraints( BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraints_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferCollectionConstraints = bufferCollectionConstraints_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C type for direct use with the C Vulkan API.
    operator VkBufferConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( this );
    }

    operator VkBufferConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferConstraintsInfoFUCHSIA *>( this );
    }

    operator VkBufferConstraintsInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( this );
    }

    operator VkBufferConstraintsInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBufferConstraintsInfoFUCHSIA *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic (reflection-style) code.
    std::
      tuple<StructureType const &, const void * const &, BufferCreateInfo const &, FormatFeatureFlags const &, BufferCollectionConstraintsInfoFUCHSIA const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, createInfo, requiredFormatFeatures, bufferCollectionConstraints );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferConstraintsInfoFUCHSIA const & ) const = default;
#  else
    // Memberwise equality; pNext is compared as a pointer, not deeply.
    bool operator==( BufferConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createInfo == rhs.createInfo ) && ( requiredFormatFeatures == rhs.requiredFormatFeatures ) &&
             ( bufferCollectionConstraints == rhs.bufferCollectionConstraints );
#    endif
    }

    bool operator!=( BufferConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror VkBufferConstraintsInfoFUCHSIA one-to-one; sType must remain eBufferConstraintsInfoFUCHSIA.
    StructureType                          sType                       = StructureType::eBufferConstraintsInfoFUCHSIA;
    const void *                           pNext                       = {};
    BufferCreateInfo                       createInfo                  = {};
    FormatFeatureFlags                     requiredFormatFeatures      = {};
    BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for generic code.
  template <>
  struct CppType<VkBufferConstraintsInfoFUCHSIA>
  {
    using Type = BufferConstraintsInfoFUCHSIA;
  };
#  endif

  // Maps the sType enumerant to the wrapper type (used e.g. by structure-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::eBufferConstraintsInfoFUCHSIA>
  {
    using Type = BufferConstraintsInfoFUCHSIA;
  };
#endif /*VK_USE_PLATFORM_FUCHSIA*/

  // wrapper struct for struct VkBufferCopy, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCopy.html
  // Plain data struct (no sType/pNext): describes one region of a buffer-to-buffer copy.
  struct BufferCopy
  {
    using NativeType = VkBufferCopy;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BufferCopy( DeviceSize srcOffset_ = {}, DeviceSize dstOffset_ = {}, DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
      : srcOffset{ srcOffset_ }
      , dstOffset{ dstOffset_ }
      , size{ size_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BufferCopy( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Implicit conversion from the native C struct; layout-compatible, so a reinterpret_cast suffices.
    BufferCopy( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCopy( *reinterpret_cast<BufferCopy const *>( &rhs ) ) {}

    BufferCopy & operator=( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BufferCopy & operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BufferCopy const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent per-member setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 BufferCopy & setSrcOffset( DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      srcOffset = srcOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCopy & setDstOffset( DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      dstOffset = dstOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCopy & setSize( DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C type for direct use with the C Vulkan API.
    operator VkBufferCopy const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferCopy *>( this );
    }

    operator VkBufferCopy &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferCopy *>( this );
    }

    operator VkBufferCopy const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBufferCopy *>( this );
    }

    operator VkBufferCopy *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBufferCopy *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic (reflection-style) code.
    std::tuple<DeviceSize const &, DeviceSize const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( srcOffset, dstOffset, size );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferCopy const & ) const = default;
#else
    bool operator==( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( srcOffset == rhs.srcOffset ) && ( dstOffset == rhs.dstOffset ) && ( size == rhs.size );
#  endif
    }

    bool operator!=( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkBufferCopy one-to-one.
    DeviceSize srcOffset = {};
    DeviceSize dstOffset = {};
    DeviceSize size      = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for generic code.
  template <>
  struct CppType<VkBufferCopy>
  {
    using Type = BufferCopy;
  };
#endif

  // wrapper struct for struct VkBufferCopy2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCopy2.html
  // Extensible (sType/pNext) variant of BufferCopy, used by vkCmdCopyBuffer2-style commands.
  struct BufferCopy2
  {
    using NativeType = VkBufferCopy2;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBufferCopy2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      BufferCopy2( DeviceSize srcOffset_ = {}, DeviceSize dstOffset_ = {}, DeviceSize size_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcOffset{ srcOffset_ }
      , dstOffset{ dstOffset_ }
      , size{ size_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BufferCopy2( BufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Implicit conversion from the native C struct; layout-compatible, so a reinterpret_cast suffices.
    BufferCopy2( VkBufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCopy2( *reinterpret_cast<BufferCopy2 const *>( &rhs ) ) {}

    BufferCopy2 & operator=( BufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BufferCopy2 & operator=( VkBufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BufferCopy2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent per-member setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setSrcOffset( DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      srcOffset = srcOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setDstOffset( DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      dstOffset = dstOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setSize( DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C type for direct use with the C Vulkan API.
    operator VkBufferCopy2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferCopy2 *>( this );
    }

    operator VkBufferCopy2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferCopy2 *>( this );
    }

    operator VkBufferCopy2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBufferCopy2 *>( this );
    }

    operator VkBufferCopy2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBufferCopy2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic (reflection-style) code.
    std::tuple<StructureType const &, const void * const &, DeviceSize const &, DeviceSize const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcOffset, dstOffset, size );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferCopy2 const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a pointer, not deeply.
    bool operator==( BufferCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcOffset == rhs.srcOffset ) && ( dstOffset == rhs.dstOffset ) && ( size == rhs.size );
#  endif
    }

    bool operator!=( BufferCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkBufferCopy2 one-to-one; sType must remain eBufferCopy2.
    StructureType sType     = StructureType::eBufferCopy2;
    const void *  pNext     = {};
    DeviceSize    srcOffset = {};
    DeviceSize    dstOffset = {};
    DeviceSize    size      = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for generic code.
  template <>
  struct CppType<VkBufferCopy2>
  {
    using Type = BufferCopy2;
  };
#endif

  // Maps the sType enumerant to the wrapper type (used e.g. by structure-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::eBufferCopy2>
  {
    using Type = BufferCopy2;
  };

  // Backward-compatible alias for the former KHR extension name (promoted to core).
  using BufferCopy2KHR = BufferCopy2;

  // wrapper struct for struct VkBufferDeviceAddressCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferDeviceAddressCreateInfoEXT.html
  struct BufferDeviceAddressCreateInfoEXT
  {
    using NativeType = VkBufferDeviceAddressCreateInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBufferDeviceAddressCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( DeviceAddress deviceAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , deviceAddress{ deviceAddress_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Implicit conversion from the native C struct; layout-compatible, so a reinterpret_cast suffices.
    BufferDeviceAddressCreateInfoEXT( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : BufferDeviceAddressCreateInfoEXT( *reinterpret_cast<BufferDeviceAddressCreateInfoEXT const *>( &rhs ) )
    {
    }

    BufferDeviceAddressCreateInfoEXT & operator=( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BufferDeviceAddressCreateInfoEXT & operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BufferDeviceAddressCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent per-member setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT & setDeviceAddress( DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceAddress = deviceAddress_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C type for direct use with the C Vulkan API.
    operator VkBufferDeviceAddressCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferDeviceAddressCreateInfoEXT *>( this );
    }

    operator VkBufferDeviceAddressCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferDeviceAddressCreateInfoEXT *>( this );
    }

    operator VkBufferDeviceAddressCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBufferDeviceAddressCreateInfoEXT *>( this );
    }

    operator VkBufferDeviceAddressCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBufferDeviceAddressCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic (reflection-style) code.
    std::tuple<StructureType const &, const void * const &, DeviceAddress const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, deviceAddress );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferDeviceAddressCreateInfoEXT const & ) const = default;
#else
    bool operator==( BufferDeviceAddressCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress );
#  endif
    }

    bool operator!=( BufferDeviceAddressCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkBufferDeviceAddressCreateInfoEXT one-to-one; sType must remain eBufferDeviceAddressCreateInfoEXT.
    StructureType sType         = StructureType::eBufferDeviceAddressCreateInfoEXT;
    const void *  pNext         = {};
    DeviceAddress deviceAddress = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for generic code.
  template <>
  struct CppType<VkBufferDeviceAddressCreateInfoEXT>
  {
    using Type = BufferDeviceAddressCreateInfoEXT;
  };
#endif

  // Maps the sType enumerant to the wrapper type (used e.g. by structure-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::eBufferDeviceAddressCreateInfoEXT>
  {
    using Type = BufferDeviceAddressCreateInfoEXT;
  };

  // wrapper struct for struct VkBufferDeviceAddressInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferDeviceAddressInfo.html
  struct BufferDeviceAddressInfo
  {
    using NativeType = VkBufferDeviceAddressInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBufferDeviceAddressInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , buffer{ buffer_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Implicit conversion from the native C struct; layout-compatible, so a reinterpret_cast suffices.
    BufferDeviceAddressInfo( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : BufferDeviceAddressInfo( *reinterpret_cast<BufferDeviceAddressInfo const *>( &rhs ) )
    {
    }

    BufferDeviceAddressInfo & operator=( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BufferDeviceAddressInfo & operator=( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BufferDeviceAddressInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent per-member setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo & setBuffer( Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C type for direct use with the C Vulkan API.
    operator VkBufferDeviceAddressInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferDeviceAddressInfo *>( this );
    }

    operator VkBufferDeviceAddressInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferDeviceAddressInfo *>( this );
    }

    operator VkBufferDeviceAddressInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBufferDeviceAddressInfo *>( this );
    }

    operator VkBufferDeviceAddressInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBufferDeviceAddressInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic (reflection-style) code.
    std::tuple<StructureType const &, const void * const &, Buffer const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, buffer );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferDeviceAddressInfo const & ) const = default;
#else
    bool operator==( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer );
#  endif
    }

    bool operator!=( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkBufferDeviceAddressInfo one-to-one; sType must remain eBufferDeviceAddressInfo.
    StructureType sType  = StructureType::eBufferDeviceAddressInfo;
    const void *  pNext  = {};
    Buffer        buffer = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for generic code.
  template <>
  struct CppType<VkBufferDeviceAddressInfo>
  {
    using Type = BufferDeviceAddressInfo;
  };
#endif

  // Maps the sType enumerant to the wrapper type (used e.g. by structure-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::eBufferDeviceAddressInfo>
  {
    using Type = BufferDeviceAddressInfo;
  };

  // Backward-compatible aliases for the former EXT/KHR extension names (promoted to core).
  using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo;
  using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo;

  // wrapper struct for struct VkBufferImageCopy, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferImageCopy.html
  // Plain data struct (no sType/pNext): describes one region of a buffer<->image copy.
  struct BufferImageCopy
  {
    using NativeType = VkBufferImageCopy;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BufferImageCopy( DeviceSize             bufferOffset_      = {},
                                          uint32_t               bufferRowLength_   = {},
                                          uint32_t               bufferImageHeight_ = {},
                                          ImageSubresourceLayers imageSubresource_  = {},
                                          Offset3D               imageOffset_       = {},
                                          Extent3D               imageExtent_       = {} ) VULKAN_HPP_NOEXCEPT
      : bufferOffset{ bufferOffset_ }
      , bufferRowLength{ bufferRowLength_ }
      , bufferImageHeight{ bufferImageHeight_ }
      , imageSubresource{ imageSubresource_ }
      , imageOffset{ imageOffset_ }
      , imageExtent{ imageExtent_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BufferImageCopy( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Implicit conversion from the native C struct; layout-compatible, so a reinterpret_cast suffices.
    BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : BufferImageCopy( *reinterpret_cast<BufferImageCopy const *>( &rhs ) ) {}

    BufferImageCopy & operator=( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BufferImageCopy & operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BufferImageCopy const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent per-member setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferOffset( DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferOffset = bufferOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferRowLength = bufferRowLength_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferImageHeight = bufferImageHeight_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageSubresource( ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      imageSubresource = imageSubresource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageOffset( Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      imageOffset = imageOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageExtent( Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
    {
      imageExtent = imageExtent_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C type for direct use with the C Vulkan API.
    operator VkBufferImageCopy const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferImageCopy *>( this );
    }

    operator VkBufferImageCopy &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferImageCopy *>( this );
    }

    operator VkBufferImageCopy const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBufferImageCopy *>( this );
    }

    operator VkBufferImageCopy *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBufferImageCopy *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic (reflection-style) code.
    std::tuple<DeviceSize const &, uint32_t const &, uint32_t const &, ImageSubresourceLayers const &, Offset3D const &, Extent3D const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( bufferOffset, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferImageCopy const & ) const = default;
#else
    bool operator==( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( bufferOffset == rhs.bufferOffset ) && ( bufferRowLength == rhs.bufferRowLength ) && ( bufferImageHeight == rhs.bufferImageHeight ) &&
             ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && ( imageExtent == rhs.imageExtent );
#  endif
    }

    bool operator!=( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkBufferImageCopy one-to-one.
    DeviceSize             bufferOffset      = {};
    uint32_t               bufferRowLength   = {};
    uint32_t               bufferImageHeight = {};
    ImageSubresourceLayers imageSubresource  = {};
    Offset3D               imageOffset       = {};
    Extent3D               imageExtent       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for generic code.
  template <>
  struct CppType<VkBufferImageCopy>
  {
    using Type = BufferImageCopy;
  };
#endif

  // wrapper struct for struct VkBufferImageCopy2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferImageCopy2.html
  // Extensible (sType/pNext) variant of BufferImageCopy, used by vkCmdCopyBufferToImage2-style commands.
  struct BufferImageCopy2
  {
    using NativeType = VkBufferImageCopy2;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBufferImageCopy2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BufferImageCopy2( DeviceSize             bufferOffset_      = {},
                                           uint32_t               bufferRowLength_   = {},
                                           uint32_t               bufferImageHeight_ = {},
                                           ImageSubresourceLayers imageSubresource_  = {},
                                           Offset3D               imageOffset_       = {},
                                           Extent3D               imageExtent_       = {},
                                           const void *           pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , bufferOffset{ bufferOffset_ }
      , bufferRowLength{ bufferRowLength_ }
      , bufferImageHeight{ bufferImageHeight_ }
      , imageSubresource{ imageSubresource_ }
      , imageOffset{ imageOffset_ }
      , imageExtent{ imageExtent_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BufferImageCopy2( BufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Implicit conversion from the native C struct; layout-compatible, so a reinterpret_cast suffices.
    BufferImageCopy2( VkBufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : BufferImageCopy2( *reinterpret_cast<BufferImageCopy2 const *>( &rhs ) ) {}

    BufferImageCopy2 & operator=( BufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BufferImageCopy2 & operator=( VkBufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BufferImageCopy2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferOffset( DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferOffset = bufferOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferRowLength = bufferRowLength_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferImageHeight = bufferImageHeight_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageSubresource( ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      imageSubresource = imageSubresource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageOffset( Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      imageOffset = imageOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageExtent( Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
    {
      imageExtent = imageExtent_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkBufferImageCopy2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferImageCopy2 *>( this );
    }

    operator VkBufferImageCopy2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferImageCopy2 *>( this );
    }

    operator VkBufferImageCopy2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBufferImageCopy2 *>( this );
    }

    operator VkBufferImageCopy2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBufferImageCopy2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &,
               const void * const &,
               DeviceSize const &,
               uint32_t const &,
               uint32_t const &,
               ImageSubresourceLayers const &,
               Offset3D const &,
               Extent3D const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, bufferOffset, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferImageCopy2 const & ) const = default;
#else
    bool operator==( BufferImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferOffset == rhs.bufferOffset ) && ( bufferRowLength == rhs.bufferRowLength ) &&
             ( bufferImageHeight == rhs.bufferImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) &&
             ( imageExtent == rhs.imageExtent );
#  endif
    }

    bool operator!=( BufferImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType          sType             = StructureType::eBufferImageCopy2;
    const void *           pNext             = {};
    DeviceSize             bufferOffset      = {};
    uint32_t               bufferRowLength   = {};
    uint32_t               bufferImageHeight = {};
    ImageSubresourceLayers imageSubresource  = {};
    Offset3D               imageOffset       = {};
    Extent3D               imageExtent       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper for template metaprogramming.
  template <>
  struct CppType<VkBufferImageCopy2>
  {
    using Type = BufferImageCopy2;
  };
#endif

  // Maps the StructureType enumerant to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eBufferImageCopy2>
  {
    using Type = BufferImageCopy2;
  };

  // Backwards-compatible alias for the former KHR-suffixed name.
  using BufferImageCopy2KHR = BufferImageCopy2;

  // wrapper struct for struct VkBufferMemoryBarrier, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferMemoryBarrier.html
  struct BufferMemoryBarrier
  {
    // The layout-compatible C struct this wrapper mirrors.
    using NativeType = VkBufferMemoryBarrier;

    // May appear at most once in a pNext chain; sType is fixed to this value.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBufferMemoryBarrier;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: every member is zero-initialized by default, pNext is nullptr.
    VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( AccessFlags  srcAccessMask_       = {},
                                              AccessFlags  dstAccessMask_       = {},
                                              uint32_t     srcQueueFamilyIndex_ = {},
                                              uint32_t     dstQueueFamilyIndex_ = {},
                                              Buffer       buffer_              = {},
                                              DeviceSize   offset_              = {},
                                              DeviceSize   size_                = {},
                                              const void * pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcAccessMask{ srcAccessMask_ }
      , dstAccessMask{ dstAccessMask_ }
      , srcQueueFamilyIndex{ srcQueueFamilyIndex_ }
      , dstQueueFamilyIndex{ dstQueueFamilyIndex_ }
      , buffer{ buffer_ }
      , offset{ offset_ }
      , size{ size_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because this wrapper is assumed layout-identical to VkBufferMemoryBarrier.
    BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT : BufferMemoryBarrier( *reinterpret_cast<BufferMemoryBarrier const *>( &rhs ) )
    {
    }

    BufferMemoryBarrier & operator=( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compat reinterpret_cast.
    BufferMemoryBarrier & operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BufferMemoryBarrier const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable fluent setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSrcAccessMask( AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAccessMask = srcAccessMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setDstAccessMask( AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAccessMask = dstAccessMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      srcQueueFamilyIndex = srcQueueFamilyIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      dstQueueFamilyIndex = dstQueueFamilyIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setBuffer( Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setOffset( DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSize( DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkBufferMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferMemoryBarrier *>( this );
    }

    operator VkBufferMemoryBarrier &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferMemoryBarrier *>( this );
    }

    operator VkBufferMemoryBarrier const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBufferMemoryBarrier *>( this );
    }

    operator VkBufferMemoryBarrier *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBufferMemoryBarrier *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members (including sType) as a tuple of const references, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               AccessFlags const &,
               AccessFlags const &,
               uint32_t const &,
               uint32_t const &,
               Buffer const &,
               DeviceSize const &,
               DeviceSize const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcAccessMask, dstAccessMask, srcQueueFamilyIndex, dstQueueFamilyIndex, buffer, offset, size );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferMemoryBarrier const & ) const = default;
#else
    // Memberwise equality; note pNext is compared as a raw pointer, not deeply.
    bool operator==( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) &&
             ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( buffer == rhs.buffer ) &&
             ( offset == rhs.offset ) && ( size == rhs.size );
#  endif
    }

    bool operator!=( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkBufferMemoryBarrier exactly; do not reorder.
    StructureType sType               = StructureType::eBufferMemoryBarrier;
    const void *  pNext               = {};
    AccessFlags   srcAccessMask       = {};
    AccessFlags   dstAccessMask       = {};
    uint32_t      srcQueueFamilyIndex = {};
    uint32_t      dstQueueFamilyIndex = {};
    Buffer        buffer              = {};
    DeviceSize    offset              = {};
    DeviceSize    size                = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper for template metaprogramming.
  template <>
  struct CppType<VkBufferMemoryBarrier>
  {
    using Type = BufferMemoryBarrier;
  };
#endif

  // Maps the StructureType enumerant to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eBufferMemoryBarrier>
  {
    using Type = BufferMemoryBarrier;
  };

  // wrapper struct for struct VkBufferMemoryBarrier2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferMemoryBarrier2.html
  struct BufferMemoryBarrier2
  {
    // The layout-compatible C struct this wrapper mirrors.
    using NativeType = VkBufferMemoryBarrier2;

    // May appear at most once in a pNext chain; sType is fixed to this value.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBufferMemoryBarrier2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: every member is zero-initialized by default, pNext is nullptr.
    VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2( PipelineStageFlags2 srcStageMask_        = {},
                                               AccessFlags2        srcAccessMask_       = {},
                                               PipelineStageFlags2 dstStageMask_        = {},
                                               AccessFlags2        dstAccessMask_       = {},
                                               uint32_t            srcQueueFamilyIndex_ = {},
                                               uint32_t            dstQueueFamilyIndex_ = {},
                                               Buffer              buffer_              = {},
                                               DeviceSize          offset_              = {},
                                               DeviceSize          size_                = {},
                                               const void *        pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcStageMask{ srcStageMask_ }
      , srcAccessMask{ srcAccessMask_ }
      , dstStageMask{ dstStageMask_ }
      , dstAccessMask{ dstAccessMask_ }
      , srcQueueFamilyIndex{ srcQueueFamilyIndex_ }
      , dstQueueFamilyIndex{ dstQueueFamilyIndex_ }
      , buffer{ buffer_ }
      , offset{ offset_ }
      , size{ size_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2( BufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because this wrapper is assumed layout-identical to VkBufferMemoryBarrier2.
    BufferMemoryBarrier2( VkBufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : BufferMemoryBarrier2( *reinterpret_cast<BufferMemoryBarrier2 const *>( &rhs ) )
    {
    }

    BufferMemoryBarrier2 & operator=( BufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compat reinterpret_cast.
    BufferMemoryBarrier2 & operator=( VkBufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BufferMemoryBarrier2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable fluent setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcStageMask( PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcStageMask = srcStageMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcAccessMask( AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAccessMask = srcAccessMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstStageMask( PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstStageMask = dstStageMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstAccessMask( AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAccessMask = dstAccessMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      srcQueueFamilyIndex = srcQueueFamilyIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      dstQueueFamilyIndex = dstQueueFamilyIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setBuffer( Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setOffset( DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSize( DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkBufferMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferMemoryBarrier2 *>( this );
    }

    operator VkBufferMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferMemoryBarrier2 *>( this );
    }

    operator VkBufferMemoryBarrier2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBufferMemoryBarrier2 *>( this );
    }

    operator VkBufferMemoryBarrier2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBufferMemoryBarrier2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members (including sType) as a tuple of const references, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               PipelineStageFlags2 const &,
               AccessFlags2 const &,
               PipelineStageFlags2 const &,
               AccessFlags2 const &,
               uint32_t const &,
               uint32_t const &,
               Buffer const &,
               DeviceSize const &,
               DeviceSize const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask, srcQueueFamilyIndex, dstQueueFamilyIndex, buffer, offset, size );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferMemoryBarrier2 const & ) const = default;
#else
    // Memberwise equality; note pNext is compared as a raw pointer, not deeply.
    bool operator==( BufferMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) &&
             ( dstStageMask == rhs.dstStageMask ) && ( dstAccessMask == rhs.dstAccessMask ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) &&
             ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( size == rhs.size );
#  endif
    }

    bool operator!=( BufferMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkBufferMemoryBarrier2 exactly; do not reorder.
    StructureType       sType               = StructureType::eBufferMemoryBarrier2;
    const void *        pNext               = {};
    PipelineStageFlags2 srcStageMask        = {};
    AccessFlags2        srcAccessMask       = {};
    PipelineStageFlags2 dstStageMask        = {};
    AccessFlags2        dstAccessMask       = {};
    uint32_t            srcQueueFamilyIndex = {};
    uint32_t            dstQueueFamilyIndex = {};
    Buffer              buffer              = {};
    DeviceSize          offset              = {};
    DeviceSize          size                = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper for template metaprogramming.
  template <>
  struct CppType<VkBufferMemoryBarrier2>
  {
    using Type = BufferMemoryBarrier2;
  };
#endif

  // Maps the StructureType enumerant to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eBufferMemoryBarrier2>
  {
    using Type = BufferMemoryBarrier2;
  };

  // Backwards-compatible alias for the former KHR-suffixed name.
  using BufferMemoryBarrier2KHR = BufferMemoryBarrier2;

  // wrapper struct for struct VkBufferMemoryRequirementsInfo2, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferMemoryRequirementsInfo2.html
  struct BufferMemoryRequirementsInfo2
  {
    // Layout-compatible C++ wrapper around VkBufferMemoryRequirementsInfo2.
    using NativeType = VkBufferMemoryRequirementsInfo2;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBufferMemoryRequirementsInfo2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; buffer is zero-initialized and pNext is nullptr by default.
    VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , buffer( buffer_ )
    {
    }

    VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the two types share an identical memory layout.
    BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & native ) VULKAN_HPP_NOEXCEPT
      : BufferMemoryRequirementsInfo2( *reinterpret_cast<BufferMemoryRequirementsInfo2 const *>( &native ) )
    {
    }

    BufferMemoryRequirementsInfo2 & operator=( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    BufferMemoryRequirementsInfo2 & operator=( VkBufferMemoryRequirementsInfo2 const & native ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BufferMemoryRequirementsInfo2 const *>( &native );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters, each returning *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 & setBuffer( Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct: const/non-const reference and pointer.
    operator VkBufferMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( this );
    }

    operator VkBufferMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferMemoryRequirementsInfo2 *>( this );
    }

    operator VkBufferMemoryRequirementsInfo2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( this );
    }

    operator VkBufferMemoryRequirementsInfo2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBufferMemoryRequirementsInfo2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members, sType included, as a tuple of const references in declaration order.
    std::tuple<StructureType const &, const void * const &, Buffer const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, buffer );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferMemoryRequirementsInfo2 const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer.
    bool operator==( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer );
#  endif
    }

    bool operator!=( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly.
    StructureType sType  = StructureType::eBufferMemoryRequirementsInfo2;
    const void *  pNext  = {};
    Buffer        buffer = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper for template metaprogramming.
  template <>
  struct CppType<VkBufferMemoryRequirementsInfo2>
  {
    using Type = BufferMemoryRequirementsInfo2;
  };
#endif

  // Maps the StructureType enumerant to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eBufferMemoryRequirementsInfo2>
  {
    using Type = BufferMemoryRequirementsInfo2;
  };

  // Backwards-compatible alias for the former KHR-suffixed name.
  using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2;

  // wrapper struct for struct VkBufferOpaqueCaptureAddressCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferOpaqueCaptureAddressCreateInfo.html
  struct BufferOpaqueCaptureAddressCreateInfo
  {
    // Layout-compatible C++ wrapper around VkBufferOpaqueCaptureAddressCreateInfo.
    using NativeType = VkBufferOpaqueCaptureAddressCreateInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBufferOpaqueCaptureAddressCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; the address defaults to 0 and pNext to nullptr.
    VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( uint64_t opaqueCaptureAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , opaqueCaptureAddress( opaqueCaptureAddress_ )
    {
    }

    VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the two types share an identical memory layout.
    BufferOpaqueCaptureAddressCreateInfo( VkBufferOpaqueCaptureAddressCreateInfo const & native ) VULKAN_HPP_NOEXCEPT
      : BufferOpaqueCaptureAddressCreateInfo( *reinterpret_cast<BufferOpaqueCaptureAddressCreateInfo const *>( &native ) )
    {
    }

    BufferOpaqueCaptureAddressCreateInfo & operator=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    BufferOpaqueCaptureAddressCreateInfo & operator=( VkBufferOpaqueCaptureAddressCreateInfo const & native ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BufferOpaqueCaptureAddressCreateInfo const *>( &native );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters, each returning *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      opaqueCaptureAddress = opaqueCaptureAddress_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct: const/non-const reference and pointer.
    operator VkBufferOpaqueCaptureAddressCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferOpaqueCaptureAddressCreateInfo *>( this );
    }

    operator VkBufferOpaqueCaptureAddressCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferOpaqueCaptureAddressCreateInfo *>( this );
    }

    operator VkBufferOpaqueCaptureAddressCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBufferOpaqueCaptureAddressCreateInfo *>( this );
    }

    operator VkBufferOpaqueCaptureAddressCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBufferOpaqueCaptureAddressCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members, sType included, as a tuple of const references in declaration order.
    std::tuple<StructureType const &, const void * const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, opaqueCaptureAddress );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferOpaqueCaptureAddressCreateInfo const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer.
    bool operator==( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
#  endif
    }

    bool operator!=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly.
    StructureType sType                = StructureType::eBufferOpaqueCaptureAddressCreateInfo;
    const void *  pNext                = {};
    uint64_t      opaqueCaptureAddress = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper for template metaprogramming.
  template <>
  struct CppType<VkBufferOpaqueCaptureAddressCreateInfo>
  {
    using Type = BufferOpaqueCaptureAddressCreateInfo;
  };
#endif

  // Maps the StructureType enumerant to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eBufferOpaqueCaptureAddressCreateInfo>
  {
    using Type = BufferOpaqueCaptureAddressCreateInfo;
  };

  // Backwards-compatible alias for the former KHR-suffixed name.
  using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo;

  // wrapper struct for struct VkBufferUsageFlags2CreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferUsageFlags2CreateInfo.html
  struct BufferUsageFlags2CreateInfo
  {
    // Layout-compatible C++ wrapper around VkBufferUsageFlags2CreateInfo.
    using NativeType = VkBufferUsageFlags2CreateInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBufferUsageFlags2CreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; usage defaults to no flags and pNext to nullptr.
    VULKAN_HPP_CONSTEXPR BufferUsageFlags2CreateInfo( BufferUsageFlags2 usage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , usage( usage_ )
    {
    }

    VULKAN_HPP_CONSTEXPR BufferUsageFlags2CreateInfo( BufferUsageFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the two types share an identical memory layout.
    BufferUsageFlags2CreateInfo( VkBufferUsageFlags2CreateInfo const & native ) VULKAN_HPP_NOEXCEPT
      : BufferUsageFlags2CreateInfo( *reinterpret_cast<BufferUsageFlags2CreateInfo const *>( &native ) )
    {
    }

    BufferUsageFlags2CreateInfo & operator=( BufferUsageFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    BufferUsageFlags2CreateInfo & operator=( VkBufferUsageFlags2CreateInfo const & native ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BufferUsageFlags2CreateInfo const *>( &native );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters, each returning *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 BufferUsageFlags2CreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferUsageFlags2CreateInfo & setUsage( BufferUsageFlags2 usage_ ) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct: const/non-const reference and pointer.
    operator VkBufferUsageFlags2CreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferUsageFlags2CreateInfo *>( this );
    }

    operator VkBufferUsageFlags2CreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferUsageFlags2CreateInfo *>( this );
    }

    operator VkBufferUsageFlags2CreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBufferUsageFlags2CreateInfo *>( this );
    }

    operator VkBufferUsageFlags2CreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBufferUsageFlags2CreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members, sType included, as a tuple of const references in declaration order.
    std::tuple<StructureType const &, const void * const &, BufferUsageFlags2 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, usage );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferUsageFlags2CreateInfo const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer.
    bool operator==( BufferUsageFlags2CreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage );
#  endif
    }

    bool operator!=( BufferUsageFlags2CreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly.
    StructureType     sType = StructureType::eBufferUsageFlags2CreateInfo;
    const void *      pNext = {};
    BufferUsageFlags2 usage = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper for template metaprogramming.
  template <>
  struct CppType<VkBufferUsageFlags2CreateInfo>
  {
    using Type = BufferUsageFlags2CreateInfo;
  };
#endif

  // Maps the StructureType enumerant to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eBufferUsageFlags2CreateInfo>
  {
    using Type = BufferUsageFlags2CreateInfo;
  };

  // Backwards-compatible alias for the former KHR-suffixed name.
  using BufferUsageFlags2CreateInfoKHR = BufferUsageFlags2CreateInfo;

  // wrapper struct for struct VkBufferViewCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferViewCreateInfo.html
  struct BufferViewCreateInfo
  {
    using NativeType = VkBufferViewCreateInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBufferViewCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( BufferViewCreateFlags flags_  = {},
                                               Buffer                buffer_ = {},
                                               Format                format_ = Format::eUndefined,
                                               DeviceSize            offset_ = {},
                                               DeviceSize            range_  = {},
                                               const void *          pNext_  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , buffer{ buffer_ }
      , format{ format_ }
      , offset{ offset_ }
      , range{ range_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : BufferViewCreateInfo( *reinterpret_cast<BufferViewCreateInfo const *>( &rhs ) )
    {
    }

    BufferViewCreateInfo & operator=( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BufferViewCreateInfo & operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BufferViewCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setFlags( BufferViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setBuffer( Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setFormat( Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setOffset( DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setRange( DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
    {
      range = range_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkBufferViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferViewCreateInfo *>( this );
    }

    operator VkBufferViewCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferViewCreateInfo *>( this );
    }

    operator VkBufferViewCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBufferViewCreateInfo *>( this );
    }

    operator VkBufferViewCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBufferViewCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::
      tuple<StructureType const &, const void * const &, BufferViewCreateFlags const &, Buffer const &, Format const &, DeviceSize const &, DeviceSize const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, buffer, format, offset, range );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferViewCreateInfo const & ) const = default;
#else
    bool operator==( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( buffer == rhs.buffer ) && ( format == rhs.format ) &&
             ( offset == rhs.offset ) && ( range == rhs.range );
#  endif
    }

    bool operator!=( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType         sType  = StructureType::eBufferViewCreateInfo;
    const void *          pNext  = {};
    BufferViewCreateFlags flags  = {};
    Buffer                buffer = {};
    Format                format = Format::eUndefined;
    DeviceSize            offset = {};
    DeviceSize            range  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization: maps the native VkBufferViewCreateInfo type to its C++ wrapper.
  template <>
  struct CppType<VkBufferViewCreateInfo>
  {
    using Type = BufferViewCreateInfo;
  };
#endif

  // Trait specialization: maps StructureType::eBufferViewCreateInfo to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eBufferViewCreateInfo>
  {
    using Type = BufferViewCreateInfo;
  };

  // wrapper struct for struct VkStridedDeviceAddressNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkStridedDeviceAddressNV.html
  struct StridedDeviceAddressNV
  {
    using NativeType = VkStridedDeviceAddressNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; both members default to zero.
    VULKAN_HPP_CONSTEXPR StridedDeviceAddressNV( DeviceAddress startAddress_ = {}, DeviceSize strideInBytes_ = {} ) VULKAN_HPP_NOEXCEPT
      : startAddress( startAddress_ )
      , strideInBytes( strideInBytes_ )
    {
    }

    VULKAN_HPP_CONSTEXPR StridedDeviceAddressNV( StridedDeviceAddressNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible C struct via pointer aliasing.
    StridedDeviceAddressNV( VkStridedDeviceAddressNV const & native ) VULKAN_HPP_NOEXCEPT
      : StridedDeviceAddressNV( *reinterpret_cast<StridedDeviceAddressNV const *>( &native ) )
    {
    }

    StridedDeviceAddressNV & operator=( StridedDeviceAddressNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible C struct via pointer aliasing.
    StridedDeviceAddressNV & operator=( VkStridedDeviceAddressNV const & native ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<StridedDeviceAddressNV const *>( &native );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per member.
    VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressNV & setStartAddress( DeviceAddress startAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      this->startAddress = startAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressNV & setStrideInBytes( DeviceSize strideInBytes_ ) VULKAN_HPP_NOEXCEPT
    {
      this->strideInBytes = strideInBytes_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit views onto the native C struct (reference and pointer, const and non-const).
    operator VkStridedDeviceAddressNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkStridedDeviceAddressNV const *>( this );
    }

    operator VkStridedDeviceAddressNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkStridedDeviceAddressNV *>( this );
    }

    operator VkStridedDeviceAddressNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkStridedDeviceAddressNV const *>( this );
    }

    operator VkStridedDeviceAddressNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkStridedDeviceAddressNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, in declaration order.
    std::tuple<DeviceAddress const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( startAddress, strideInBytes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( StridedDeviceAddressNV const & ) const = default;
#else
    bool operator==( StridedDeviceAddressNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return startAddress == rhs.startAddress && strideInBytes == rhs.strideInBytes;
#  endif
    }

    bool operator!=( StridedDeviceAddressNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    DeviceAddress startAddress  = {};
    DeviceSize    strideInBytes = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization: maps the native VkStridedDeviceAddressNV type to its C++ wrapper.
  template <>
  struct CppType<VkStridedDeviceAddressNV>
  {
    using Type = StridedDeviceAddressNV;
  };
#endif

  // wrapper struct for struct VkBuildPartitionedAccelerationStructureIndirectCommandNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBuildPartitionedAccelerationStructureIndirectCommandNV.html
  struct BuildPartitionedAccelerationStructureIndirectCommandNV
  {
    using NativeType = VkBuildPartitionedAccelerationStructureIndirectCommandNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BuildPartitionedAccelerationStructureIndirectCommandNV(
      PartitionedAccelerationStructureOpTypeNV opType_   = PartitionedAccelerationStructureOpTypeNV::eWriteInstance,
      uint32_t                                 argCount_ = {},
      StridedDeviceAddressNV                   argData_  = {} ) VULKAN_HPP_NOEXCEPT
      : opType{ opType_ }
      , argCount{ argCount_ }
      , argData{ argData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR BuildPartitionedAccelerationStructureIndirectCommandNV( BuildPartitionedAccelerationStructureIndirectCommandNV const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    BuildPartitionedAccelerationStructureIndirectCommandNV( VkBuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : BuildPartitionedAccelerationStructureIndirectCommandNV( *reinterpret_cast<BuildPartitionedAccelerationStructureIndirectCommandNV const *>( &rhs ) )
    {
    }

    BuildPartitionedAccelerationStructureIndirectCommandNV &
      operator=( BuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    BuildPartitionedAccelerationStructureIndirectCommandNV &
      operator=( VkBuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BuildPartitionedAccelerationStructureIndirectCommandNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureIndirectCommandNV &
      setOpType( PartitionedAccelerationStructureOpTypeNV opType_ ) VULKAN_HPP_NOEXCEPT
    {
      opType = opType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureIndirectCommandNV & setArgCount( uint32_t argCount_ ) VULKAN_HPP_NOEXCEPT
    {
      argCount = argCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureIndirectCommandNV & setArgData( StridedDeviceAddressNV const & argData_ ) VULKAN_HPP_NOEXCEPT
    {
      argData = argData_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkBuildPartitionedAccelerationStructureIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBuildPartitionedAccelerationStructureIndirectCommandNV *>( this );
    }

    operator VkBuildPartitionedAccelerationStructureIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBuildPartitionedAccelerationStructureIndirectCommandNV *>( this );
    }

    operator VkBuildPartitionedAccelerationStructureIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkBuildPartitionedAccelerationStructureIndirectCommandNV *>( this );
    }

    operator VkBuildPartitionedAccelerationStructureIndirectCommandNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBuildPartitionedAccelerationStructureIndirectCommandNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<PartitionedAccelerationStructureOpTypeNV const &, uint32_t const &, StridedDeviceAddressNV const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( opType, argCount, argData );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BuildPartitionedAccelerationStructureIndirectCommandNV const & ) const = default;
#else
    bool operator==( BuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( opType == rhs.opType ) && ( argCount == rhs.argCount ) && ( argData == rhs.argData );
#  endif
    }

    bool operator!=( BuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    PartitionedAccelerationStructureOpTypeNV opType   = PartitionedAccelerationStructureOpTypeNV::eWriteInstance;
    uint32_t                                 argCount = {};
    StridedDeviceAddressNV                   argData  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization: maps the native VkBuildPartitionedAccelerationStructureIndirectCommandNV type to its C++ wrapper.
  template <>
  struct CppType<VkBuildPartitionedAccelerationStructureIndirectCommandNV>
  {
    using Type = BuildPartitionedAccelerationStructureIndirectCommandNV;
  };
#endif

  // wrapper struct for struct VkPartitionedAccelerationStructureInstancesInputNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPartitionedAccelerationStructureInstancesInputNV.html
  struct PartitionedAccelerationStructureInstancesInputNV
  {
    using NativeType = VkPartitionedAccelerationStructureInstancesInputNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePartitionedAccelerationStructureInstancesInputNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PartitionedAccelerationStructureInstancesInputNV( BuildAccelerationStructureFlagsKHR flags_                             = {},
                                                                           uint32_t                           instanceCount_                     = {},
                                                                           uint32_t                           maxInstancePerPartitionCount_      = {},
                                                                           uint32_t                           partitionCount_                    = {},
                                                                           uint32_t                           maxInstanceInGlobalPartitionCount_ = {},
                                                                           void *                             pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , instanceCount{ instanceCount_ }
      , maxInstancePerPartitionCount{ maxInstancePerPartitionCount_ }
      , partitionCount{ partitionCount_ }
      , maxInstanceInGlobalPartitionCount{ maxInstanceInGlobalPartitionCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PartitionedAccelerationStructureInstancesInputNV( PartitionedAccelerationStructureInstancesInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PartitionedAccelerationStructureInstancesInputNV( VkPartitionedAccelerationStructureInstancesInputNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PartitionedAccelerationStructureInstancesInputNV( *reinterpret_cast<PartitionedAccelerationStructureInstancesInputNV const *>( &rhs ) )
    {
    }

    PartitionedAccelerationStructureInstancesInputNV & operator=( PartitionedAccelerationStructureInstancesInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PartitionedAccelerationStructureInstancesInputNV & operator=( VkPartitionedAccelerationStructureInstancesInputNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PartitionedAccelerationStructureInstancesInputNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & setFlags( BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceCount = instanceCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV &
      setMaxInstancePerPartitionCount( uint32_t maxInstancePerPartitionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxInstancePerPartitionCount = maxInstancePerPartitionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & setPartitionCount( uint32_t partitionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      partitionCount = partitionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV &
      setMaxInstanceInGlobalPartitionCount( uint32_t maxInstanceInGlobalPartitionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxInstanceInGlobalPartitionCount = maxInstanceInGlobalPartitionCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPartitionedAccelerationStructureInstancesInputNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPartitionedAccelerationStructureInstancesInputNV *>( this );
    }

    operator VkPartitionedAccelerationStructureInstancesInputNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPartitionedAccelerationStructureInstancesInputNV *>( this );
    }

    operator VkPartitionedAccelerationStructureInstancesInputNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPartitionedAccelerationStructureInstancesInputNV *>( this );
    }

    operator VkPartitionedAccelerationStructureInstancesInputNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPartitionedAccelerationStructureInstancesInputNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &,
               void * const &,
               BuildAccelerationStructureFlagsKHR const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, instanceCount, maxInstancePerPartitionCount, partitionCount, maxInstanceInGlobalPartitionCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PartitionedAccelerationStructureInstancesInputNV const & ) const = default;
#else
    bool operator==( PartitionedAccelerationStructureInstancesInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( instanceCount == rhs.instanceCount ) &&
             ( maxInstancePerPartitionCount == rhs.maxInstancePerPartitionCount ) && ( partitionCount == rhs.partitionCount ) &&
             ( maxInstanceInGlobalPartitionCount == rhs.maxInstanceInGlobalPartitionCount );
#  endif
    }

    bool operator!=( PartitionedAccelerationStructureInstancesInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                      sType                             = StructureType::ePartitionedAccelerationStructureInstancesInputNV;
    void *                             pNext                             = {};
    BuildAccelerationStructureFlagsKHR flags                             = {};
    uint32_t                           instanceCount                     = {};
    uint32_t                           maxInstancePerPartitionCount      = {};
    uint32_t                           partitionCount                    = {};
    uint32_t                           maxInstanceInGlobalPartitionCount = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization: maps the native VkPartitionedAccelerationStructureInstancesInputNV type to its C++ wrapper.
  template <>
  struct CppType<VkPartitionedAccelerationStructureInstancesInputNV>
  {
    using Type = PartitionedAccelerationStructureInstancesInputNV;
  };
#endif

  // Trait specialization: maps StructureType::ePartitionedAccelerationStructureInstancesInputNV to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePartitionedAccelerationStructureInstancesInputNV>
  {
    using Type = PartitionedAccelerationStructureInstancesInputNV;
  };

  // wrapper struct for struct VkBuildPartitionedAccelerationStructureInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBuildPartitionedAccelerationStructureInfoNV.html
  struct BuildPartitionedAccelerationStructureInfoNV
  {
    using NativeType = VkBuildPartitionedAccelerationStructureInfoNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eBuildPartitionedAccelerationStructureInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; every member defaults to its zero-equivalent value.
    VULKAN_HPP_CONSTEXPR BuildPartitionedAccelerationStructureInfoNV( PartitionedAccelerationStructureInstancesInputNV input_                        = {},
                                                                      DeviceAddress                                    srcAccelerationStructureData_ = {},
                                                                      DeviceAddress                                    dstAccelerationStructureData_ = {},
                                                                      DeviceAddress                                    scratchData_                  = {},
                                                                      DeviceAddress                                    srcInfos_                     = {},
                                                                      DeviceAddress                                    srcInfosCount_                = {},
                                                                      void *                                           pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , input( input_ )
      , srcAccelerationStructureData( srcAccelerationStructureData_ )
      , dstAccelerationStructureData( dstAccelerationStructureData_ )
      , scratchData( scratchData_ )
      , srcInfos( srcInfos_ )
      , srcInfosCount( srcInfosCount_ )
    {
    }

    VULKAN_HPP_CONSTEXPR BuildPartitionedAccelerationStructureInfoNV( BuildPartitionedAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible C struct via pointer aliasing.
    BuildPartitionedAccelerationStructureInfoNV( VkBuildPartitionedAccelerationStructureInfoNV const & native ) VULKAN_HPP_NOEXCEPT
      : BuildPartitionedAccelerationStructureInfoNV( *reinterpret_cast<BuildPartitionedAccelerationStructureInfoNV const *>( &native ) )
    {
    }

    BuildPartitionedAccelerationStructureInfoNV & operator=( BuildPartitionedAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible C struct via pointer aliasing.
    BuildPartitionedAccelerationStructureInfoNV & operator=( VkBuildPartitionedAccelerationStructureInfoNV const & native ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<BuildPartitionedAccelerationStructureInfoNV const *>( &native );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV &
      setInput( PartitionedAccelerationStructureInstancesInputNV const & input_ ) VULKAN_HPP_NOEXCEPT
    {
      this->input = input_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV &
      setSrcAccelerationStructureData( DeviceAddress srcAccelerationStructureData_ ) VULKAN_HPP_NOEXCEPT
    {
      this->srcAccelerationStructureData = srcAccelerationStructureData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV &
      setDstAccelerationStructureData( DeviceAddress dstAccelerationStructureData_ ) VULKAN_HPP_NOEXCEPT
    {
      this->dstAccelerationStructureData = dstAccelerationStructureData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & setScratchData( DeviceAddress scratchData_ ) VULKAN_HPP_NOEXCEPT
    {
      this->scratchData = scratchData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & setSrcInfos( DeviceAddress srcInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      this->srcInfos = srcInfos_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & setSrcInfosCount( DeviceAddress srcInfosCount_ ) VULKAN_HPP_NOEXCEPT
    {
      this->srcInfosCount = srcInfosCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit views onto the native C struct (reference and pointer, const and non-const).
    operator VkBuildPartitionedAccelerationStructureInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBuildPartitionedAccelerationStructureInfoNV const *>( this );
    }

    operator VkBuildPartitionedAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBuildPartitionedAccelerationStructureInfoNV *>( this );
    }

    operator VkBuildPartitionedAccelerationStructureInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBuildPartitionedAccelerationStructureInfoNV const *>( this );
    }

    operator VkBuildPartitionedAccelerationStructureInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkBuildPartitionedAccelerationStructureInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, in declaration order.
    std::tuple<StructureType const &,
               void * const &,
               PartitionedAccelerationStructureInstancesInputNV const &,
               DeviceAddress const &,
               DeviceAddress const &,
               DeviceAddress const &,
               DeviceAddress const &,
               DeviceAddress const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, input, srcAccelerationStructureData, dstAccelerationStructureData, scratchData, srcInfos, srcInfosCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BuildPartitionedAccelerationStructureInfoNV const & ) const = default;
#else
    bool operator==( BuildPartitionedAccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return sType == rhs.sType && pNext == rhs.pNext && input == rhs.input && srcAccelerationStructureData == rhs.srcAccelerationStructureData &&
             dstAccelerationStructureData == rhs.dstAccelerationStructureData && scratchData == rhs.scratchData && srcInfos == rhs.srcInfos &&
             srcInfosCount == rhs.srcInfosCount;
#  endif
    }

    bool operator!=( BuildPartitionedAccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType                                    sType                        = StructureType::eBuildPartitionedAccelerationStructureInfoNV;
    void *                                           pNext                        = {};
    PartitionedAccelerationStructureInstancesInputNV input                        = {};
    DeviceAddress                                    srcAccelerationStructureData = {};
    DeviceAddress                                    dstAccelerationStructureData = {};
    DeviceAddress                                    scratchData                  = {};
    DeviceAddress                                    srcInfos                     = {};
    DeviceAddress                                    srcInfosCount                = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization: maps the native VkBuildPartitionedAccelerationStructureInfoNV type to its C++ wrapper.
  template <>
  struct CppType<VkBuildPartitionedAccelerationStructureInfoNV>
  {
    using Type = BuildPartitionedAccelerationStructureInfoNV;
  };
#endif

  // Trait specialization: maps StructureType::eBuildPartitionedAccelerationStructureInfoNV to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eBuildPartitionedAccelerationStructureInfoNV>
  {
    using Type = BuildPartitionedAccelerationStructureInfoNV;
  };

  // wrapper struct for struct VkCalibratedTimestampInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCalibratedTimestampInfoKHR.html
  struct CalibratedTimestampInfoKHR
  {
    using NativeType = VkCalibratedTimestampInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCalibratedTimestampInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; timeDomain defaults to eDevice.
    VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoKHR( TimeDomainKHR timeDomain_ = TimeDomainKHR::eDevice, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , timeDomain( timeDomain_ )
    {
    }

    VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoKHR( CalibratedTimestampInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible C struct via pointer aliasing.
    CalibratedTimestampInfoKHR( VkCalibratedTimestampInfoKHR const & native ) VULKAN_HPP_NOEXCEPT
      : CalibratedTimestampInfoKHR( *reinterpret_cast<CalibratedTimestampInfoKHR const *>( &native ) )
    {
    }

    CalibratedTimestampInfoKHR & operator=( CalibratedTimestampInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible C struct via pointer aliasing.
    CalibratedTimestampInfoKHR & operator=( VkCalibratedTimestampInfoKHR const & native ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CalibratedTimestampInfoKHR const *>( &native );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoKHR & setTimeDomain( TimeDomainKHR timeDomain_ ) VULKAN_HPP_NOEXCEPT
    {
      this->timeDomain = timeDomain_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit views onto the native C struct (reference and pointer, const and non-const).
    operator VkCalibratedTimestampInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCalibratedTimestampInfoKHR const *>( this );
    }

    operator VkCalibratedTimestampInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCalibratedTimestampInfoKHR *>( this );
    }

    operator VkCalibratedTimestampInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCalibratedTimestampInfoKHR const *>( this );
    }

    operator VkCalibratedTimestampInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCalibratedTimestampInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, TimeDomainKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, timeDomain );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CalibratedTimestampInfoKHR const & ) const = default;
#else
    bool operator==( CalibratedTimestampInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return sType == rhs.sType && pNext == rhs.pNext && timeDomain == rhs.timeDomain;
#  endif
    }

    bool operator!=( CalibratedTimestampInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType      = StructureType::eCalibratedTimestampInfoKHR;
    const void *  pNext      = {};
    TimeDomainKHR timeDomain = TimeDomainKHR::eDevice;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization: maps the native VkCalibratedTimestampInfoKHR type to its C++ wrapper.
  template <>
  struct CppType<VkCalibratedTimestampInfoKHR>
  {
    using Type = CalibratedTimestampInfoKHR;
  };
#endif

  // Trait specialization: maps StructureType::eCalibratedTimestampInfoKHR to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eCalibratedTimestampInfoKHR>
  {
    using Type = CalibratedTimestampInfoKHR;
  };

  // Alias for the EXT-suffixed spelling of the same structure.
  using CalibratedTimestampInfoEXT = CalibratedTimestampInfoKHR;

  // wrapper struct for struct VkCheckpointData2NV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCheckpointData2NV.html
  // NOTE: no setter section is generated for this struct (unlike its siblings here) — presumably because it is
  // filled in by the implementation rather than the application; confirm against the registry if extending it.
  struct CheckpointData2NV
  {
    using NativeType = VkCheckpointData2NV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCheckpointData2NV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; every member defaults to its zero-equivalent value.
    VULKAN_HPP_CONSTEXPR CheckpointData2NV( PipelineStageFlags2 stage_ = {}, void * pCheckpointMarker_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , stage( stage_ )
      , pCheckpointMarker( pCheckpointMarker_ )
    {
    }

    VULKAN_HPP_CONSTEXPR CheckpointData2NV( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible C struct via pointer aliasing.
    CheckpointData2NV( VkCheckpointData2NV const & native ) VULKAN_HPP_NOEXCEPT : CheckpointData2NV( *reinterpret_cast<CheckpointData2NV const *>( &native ) )
    {
    }

    CheckpointData2NV & operator=( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible C struct via pointer aliasing.
    CheckpointData2NV & operator=( VkCheckpointData2NV const & native ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CheckpointData2NV const *>( &native );
      return *this;
    }

    // Implicit views onto the native C struct (reference and pointer, const and non-const).
    operator VkCheckpointData2NV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCheckpointData2NV const *>( this );
    }

    operator VkCheckpointData2NV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCheckpointData2NV *>( this );
    }

    operator VkCheckpointData2NV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCheckpointData2NV const *>( this );
    }

    operator VkCheckpointData2NV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCheckpointData2NV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, in declaration order.
    std::tuple<StructureType const &, void * const &, PipelineStageFlags2 const &, void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stage, pCheckpointMarker );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CheckpointData2NV const & ) const = default;
#else
    bool operator==( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return sType == rhs.sType && pNext == rhs.pNext && stage == rhs.stage && pCheckpointMarker == rhs.pCheckpointMarker;
#  endif
    }

    bool operator!=( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType       sType             = StructureType::eCheckpointData2NV;
    void *              pNext             = {};
    PipelineStageFlags2 stage             = {};
    void *              pCheckpointMarker = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkCheckpointData2NV>
  {
    using Type = CheckpointData2NV;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eCheckpointData2NV>
  {
    using Type = CheckpointData2NV;
  };

  // wrapper struct for struct VkCheckpointDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCheckpointDataNV.html
  // Layout is identical to the native VkCheckpointDataNV, so the reinterpret_cast-based
  // conversions below are valid; do not reorder members.
  struct CheckpointDataNV
  {
    using NativeType = VkCheckpointDataNV;

    // allowDuplicate == false: this struct is not expected to occur more than once in a structure chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCheckpointDataNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // stage defaults to eTopOfPipe (matching the member initializer below); pNext_ is the
    // trailing, defaulted parameter so the common case needs no explicit pNext argument.
    VULKAN_HPP_CONSTEXPR CheckpointDataNV( PipelineStageFlagBits stage_             = PipelineStageFlagBits::eTopOfPipe,
                                           void *                pCheckpointMarker_ = {},
                                           void *                pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stage{ stage_ }
      , pCheckpointMarker{ pCheckpointMarker_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CheckpointDataNV( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (layout-compatible memberwise copy).
    CheckpointDataNV( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : CheckpointDataNV( *reinterpret_cast<CheckpointDataNV const *>( &rhs ) ) {}

    CheckpointDataNV & operator=( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    CheckpointDataNV & operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CheckpointDataNV const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native type, so this wrapper can be passed straight to the C API.
    operator VkCheckpointDataNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCheckpointDataNV *>( this );
    }

    operator VkCheckpointDataNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCheckpointDataNV *>( this );
    }

    operator VkCheckpointDataNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCheckpointDataNV *>( this );
    }

    operator VkCheckpointDataNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCheckpointDataNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used by the reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, PipelineStageFlagBits const &, void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stage, pCheckpointMarker );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CheckpointDataNV const & ) const = default;
#else
    // Pre-C++20 fallback: memberwise equality (pointer members compare shallowly).
    bool operator==( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && ( pCheckpointMarker == rhs.pCheckpointMarker );
#  endif
    }

    bool operator!=( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType         sType             = StructureType::eCheckpointDataNV;
    void *                pNext             = {};
    PipelineStageFlagBits stage             = PipelineStageFlagBits::eTopOfPipe;
    void *                pCheckpointMarker = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper (used by generic code, C++20 and up).
  template <>
  struct CppType<VkCheckpointDataNV>
  {
    using Type = CheckpointDataNV;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eCheckpointDataNV>
  {
    using Type = CheckpointDataNV;
  };

  // Union wrapper for VkClearColorValue: the same 16 bytes viewed as four floats,
  // four int32s, or four uint32s. As a union it has no comparison operators (the
  // active member cannot be determined at runtime).
  union ClearColorValue
  {
    using NativeType = VkClearColorValue;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )

    // Only the float32 constructor is defaulted: a default-constructed ClearColorValue holds zeroed floats.
    VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array<float, 4> & float32_ = {} ) : float32( float32_ ) {}

    // The triple braces initialize the C array nested inside ArrayWrapper1D's std::array base.
    VULKAN_HPP_CONSTEXPR ClearColorValue( float float32_0, float float32_1, float float32_2, float float32_3 )
      : float32{ { { float32_0, float32_1, float32_2, float32_3 } } }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array<int32_t, 4> & int32_ ) : int32( int32_ ) {}

    VULKAN_HPP_CONSTEXPR ClearColorValue( int32_t int32_0, int32_t int32_1, int32_t int32_2, int32_t int32_3 )
      : int32{ { { int32_0, int32_1, int32_2, int32_3 } } }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array<uint32_t, 4> & uint32_ ) : uint32( uint32_ ) {}

    VULKAN_HPP_CONSTEXPR ClearColorValue( uint32_t uint32_0, uint32_t uint32_1, uint32_t uint32_2, uint32_t uint32_3 )
      : uint32{ { { uint32_0, uint32_1, uint32_2, uint32_3 } } }
    {
    }
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
    // Each setter switches the active union member to the one it assigns.
    VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setFloat32( std::array<float, 4> float32_ ) VULKAN_HPP_NOEXCEPT
    {
      float32 = float32_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setInt32( std::array<int32_t, 4> int32_ ) VULKAN_HPP_NOEXCEPT
    {
      int32 = int32_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setUint32( std::array<uint32_t, 4> uint32_ ) VULKAN_HPP_NOEXCEPT
    {
      uint32 = uint32_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (layout-compatible reinterpretation).
    operator VkClearColorValue const &() const
    {
      return *reinterpret_cast<const VkClearColorValue *>( this );
    }

    operator VkClearColorValue &()
    {
      return *reinterpret_cast<VkClearColorValue *>( this );
    }

    ArrayWrapper1D<float, 4>    float32;
    ArrayWrapper1D<int32_t, 4>  int32;
    ArrayWrapper1D<uint32_t, 4> uint32;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper (used by generic code, C++20 and up).
  template <>
  struct CppType<VkClearColorValue>
  {
    using Type = ClearColorValue;
  };
#endif

  // wrapper struct for struct VkClearDepthStencilValue, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkClearDepthStencilValue.html
  // Plain two-member value struct (no sType/pNext); layout matches the native type,
  // so the reinterpret_cast-based conversions below are valid.
  struct ClearDepthStencilValue
  {
    using NativeType = VkClearDepthStencilValue;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( float depth_ = {}, uint32_t stencil_ = {} ) VULKAN_HPP_NOEXCEPT
      : depth{ depth_ }
      , stencil{ stencil_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (layout-compatible memberwise copy).
    ClearDepthStencilValue( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
      : ClearDepthStencilValue( *reinterpret_cast<ClearDepthStencilValue const *>( &rhs ) )
    {
    }

    ClearDepthStencilValue & operator=( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    ClearDepthStencilValue & operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ClearDepthStencilValue const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable builder-style setters.
    VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue & setDepth( float depth_ ) VULKAN_HPP_NOEXCEPT
    {
      depth = depth_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue & setStencil( uint32_t stencil_ ) VULKAN_HPP_NOEXCEPT
    {
      stencil = stencil_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type, so this wrapper can be passed straight to the C API.
    operator VkClearDepthStencilValue const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkClearDepthStencilValue *>( this );
    }

    operator VkClearDepthStencilValue &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkClearDepthStencilValue *>( this );
    }

    operator VkClearDepthStencilValue const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkClearDepthStencilValue *>( this );
    }

    operator VkClearDepthStencilValue *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkClearDepthStencilValue *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used by the reflection-based comparison below.
    std::tuple<float const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( depth, stencil );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ClearDepthStencilValue const & ) const = default;
#else
    // Pre-C++20 fallback: memberwise equality (note: depth compares with float ==).
    bool operator==( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( depth == rhs.depth ) && ( stencil == rhs.stencil );
#  endif
    }

    bool operator!=( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    float    depth   = {};
    uint32_t stencil = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper (used by generic code, C++20 and up).
  template <>
  struct CppType<VkClearDepthStencilValue>
  {
    using Type = ClearDepthStencilValue;
  };
#endif

  // Union wrapper for VkClearValue: either a color clear value or a depth/stencil
  // clear value. As a union it has no comparison operators.
  union ClearValue
  {
    using NativeType = VkClearValue;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )

    // Only the color constructor is defaulted: a default-constructed ClearValue holds a default color.
    VULKAN_HPP_CONSTEXPR_14 ClearValue( ClearColorValue color_ = {} ) : color( color_ ) {}

    VULKAN_HPP_CONSTEXPR_14 ClearValue( ClearDepthStencilValue depthStencil_ ) : depthStencil( depthStencil_ ) {}
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
    // Each setter switches the active union member to the one it assigns.
    VULKAN_HPP_CONSTEXPR_14 ClearValue & setColor( ClearColorValue const & color_ ) VULKAN_HPP_NOEXCEPT
    {
      color = color_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClearValue & setDepthStencil( ClearDepthStencilValue const & depthStencil_ ) VULKAN_HPP_NOEXCEPT
    {
      depthStencil = depthStencil_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (layout-compatible reinterpretation).
    operator VkClearValue const &() const
    {
      return *reinterpret_cast<const VkClearValue *>( this );
    }

    operator VkClearValue &()
    {
      return *reinterpret_cast<VkClearValue *>( this );
    }

#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
    ClearColorValue        color;
    ClearDepthStencilValue depthStencil;
#else
    // Without unrestricted unions, members with non-trivial types are not allowed,
    // so fall back to the native C types (same layout).
    VkClearColorValue        color;
    VkClearDepthStencilValue depthStencil;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper (used by generic code, C++20 and up).
  template <>
  struct CppType<VkClearValue>
  {
    using Type = ClearValue;
  };
#endif

  // wrapper struct for struct VkClearAttachment, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkClearAttachment.html
  // Layout matches the native type, so the reinterpret_cast-based conversions below
  // are valid. Note: contains the ClearValue union, hence no comparison operators.
  struct ClearAttachment
  {
    using NativeType = VkClearAttachment;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // CONSTEXPR_14 (not CONSTEXPR) because ClearValue's union members need C++14 constexpr rules.
    VULKAN_HPP_CONSTEXPR_14
      ClearAttachment( ImageAspectFlags aspectMask_ = {}, uint32_t colorAttachment_ = {}, ClearValue clearValue_ = {} ) VULKAN_HPP_NOEXCEPT
      : aspectMask{ aspectMask_ }
      , colorAttachment{ colorAttachment_ }
      , clearValue{ clearValue_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 ClearAttachment( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (layout-compatible memberwise copy).
    ClearAttachment( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT : ClearAttachment( *reinterpret_cast<ClearAttachment const *>( &rhs ) ) {}

    ClearAttachment & operator=( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    ClearAttachment & operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ClearAttachment const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable builder-style setters.
    VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setAspectMask( ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
    {
      aspectMask = aspectMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setColorAttachment( uint32_t colorAttachment_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachment = colorAttachment_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setClearValue( ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT
    {
      clearValue = clearValue_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type, so this wrapper can be passed straight to the C API.
    operator VkClearAttachment const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkClearAttachment *>( this );
    }

    operator VkClearAttachment &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkClearAttachment *>( this );
    }

    operator VkClearAttachment const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkClearAttachment *>( this );
    }

    operator VkClearAttachment *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkClearAttachment *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members (no operator== is generated here,
    // since the contained union cannot be compared).
    std::tuple<ImageAspectFlags const &, uint32_t const &, ClearValue const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( aspectMask, colorAttachment, clearValue );
    }
#endif

  public:
    ImageAspectFlags aspectMask      = {};
    uint32_t         colorAttachment = {};
    ClearValue       clearValue      = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper (used by generic code, C++20 and up).
  template <>
  struct CppType<VkClearAttachment>
  {
    using Type = ClearAttachment;
  };
#endif

  // wrapper struct for struct VkClearRect, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkClearRect.html
  // Plain value struct (no sType/pNext); layout matches the native type, so the
  // reinterpret_cast-based conversions below are valid.
  struct ClearRect
  {
    using NativeType = VkClearRect;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ClearRect( Rect2D rect_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT
      : rect{ rect_ }
      , baseArrayLayer{ baseArrayLayer_ }
      , layerCount{ layerCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ClearRect( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (layout-compatible memberwise copy).
    ClearRect( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT : ClearRect( *reinterpret_cast<ClearRect const *>( &rhs ) ) {}

    ClearRect & operator=( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    ClearRect & operator=( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ClearRect const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable builder-style setters.
    VULKAN_HPP_CONSTEXPR_14 ClearRect & setRect( Rect2D const & rect_ ) VULKAN_HPP_NOEXCEPT
    {
      rect = rect_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClearRect & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
    {
      baseArrayLayer = baseArrayLayer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClearRect & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
    {
      layerCount = layerCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type, so this wrapper can be passed straight to the C API.
    operator VkClearRect const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkClearRect *>( this );
    }

    operator VkClearRect &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkClearRect *>( this );
    }

    operator VkClearRect const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkClearRect *>( this );
    }

    operator VkClearRect *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkClearRect *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used by the reflection-based comparison below.
    std::tuple<Rect2D const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( rect, baseArrayLayer, layerCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ClearRect const & ) const = default;
#else
    // Pre-C++20 fallback: memberwise equality.
    bool operator==( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( rect == rhs.rect ) && ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount );
#  endif
    }

    bool operator!=( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    Rect2D   rect           = {};
    uint32_t baseArrayLayer = {};
    uint32_t layerCount     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper (used by generic code, C++20 and up).
  template <>
  struct CppType<VkClearRect>
  {
    using Type = ClearRect;
  };
#endif

  // wrapper struct for struct VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV.html
  // Plain value struct (no sType/pNext); layout matches the native type, so the
  // reinterpret_cast-based conversions below are valid.
  struct ClusterAccelerationStructureBuildClustersBottomLevelInfoNV
  {
    using NativeType = VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildClustersBottomLevelInfoNV( uint32_t      clusterReferencesCount_  = {},
                                                                                     uint32_t      clusterReferencesStride_ = {},
                                                                                     DeviceAddress clusterReferences_       = {} ) VULKAN_HPP_NOEXCEPT
      : clusterReferencesCount{ clusterReferencesCount_ }
      , clusterReferencesStride{ clusterReferencesStride_ }
      , clusterReferences{ clusterReferences_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildClustersBottomLevelInfoNV( ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (layout-compatible memberwise copy).
    ClusterAccelerationStructureBuildClustersBottomLevelInfoNV( VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ClusterAccelerationStructureBuildClustersBottomLevelInfoNV(
          *reinterpret_cast<ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const *>( &rhs ) )
    {
    }

    ClusterAccelerationStructureBuildClustersBottomLevelInfoNV &
      operator=( ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    ClusterAccelerationStructureBuildClustersBottomLevelInfoNV &
      operator=( VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable builder-style setters.
    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildClustersBottomLevelInfoNV &
      setClusterReferencesCount( uint32_t clusterReferencesCount_ ) VULKAN_HPP_NOEXCEPT
    {
      clusterReferencesCount = clusterReferencesCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildClustersBottomLevelInfoNV &
      setClusterReferencesStride( uint32_t clusterReferencesStride_ ) VULKAN_HPP_NOEXCEPT
    {
      clusterReferencesStride = clusterReferencesStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildClustersBottomLevelInfoNV &
      setClusterReferences( DeviceAddress clusterReferences_ ) VULKAN_HPP_NOEXCEPT
    {
      clusterReferences = clusterReferences_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type, so this wrapper can be passed straight to the C API.
    operator VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV *>( this );
    }

    operator VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV *>( this );
    }

    operator VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV *>( this );
    }

    operator VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used by the reflection-based comparison below.
    std::tuple<uint32_t const &, uint32_t const &, DeviceAddress const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( clusterReferencesCount, clusterReferencesStride, clusterReferences );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & ) const = default;
#else
    // Pre-C++20 fallback: memberwise equality.
    bool operator==( ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( clusterReferencesCount == rhs.clusterReferencesCount ) && ( clusterReferencesStride == rhs.clusterReferencesStride ) &&
             ( clusterReferences == rhs.clusterReferences );
#  endif
    }

    bool operator!=( ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t      clusterReferencesCount  = {};
    uint32_t      clusterReferencesStride = {};
    DeviceAddress clusterReferences       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper (used by generic code, C++20 and up).
  template <>
  struct CppType<VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV>
  {
    using Type = ClusterAccelerationStructureBuildClustersBottomLevelInfoNV;
  };
#endif

  // wrapper struct for struct VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV.html
  // The three members are bit-fields (24 + 5 + 3 bits) packed into a single 32-bit
  // word, mirroring the native struct's packing. Bit-fields cannot carry default
  // member initializers (pre-C++20), so zero-initialization happens via the
  // defaulted constructor arguments instead.
  struct ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV
  {
    using NativeType = VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Values wider than the target bit-field are truncated on assignment
    // (geometryIndex to 24 bits, reserved to 5, geometryFlags to 3).
    VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV( uint32_t geometryIndex_ = {},
                                                                                      uint32_t reserved_      = {},
                                                                                      uint32_t geometryFlags_ = {} ) VULKAN_HPP_NOEXCEPT
      : geometryIndex{ geometryIndex_ }
      , reserved{ reserved_ }
      , geometryFlags{ geometryFlags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (layout-compatible memberwise copy).
    ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV( VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV(
          *reinterpret_cast<ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const *>( &rhs ) )
    {
    }

    ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV &
      operator=( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV &
      operator=( VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable builder-style setters (same truncation-to-bit-field-width caveat as the constructor).
    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV & setGeometryIndex( uint32_t geometryIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryIndex = geometryIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV & setReserved( uint32_t reserved_ ) VULKAN_HPP_NOEXCEPT
    {
      reserved = reserved_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV & setGeometryFlags( uint32_t geometryFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryFlags = geometryFlags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type, so this wrapper can be passed straight to the C API.
    operator VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV *>( this );
    }

    operator VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV *>( this );
    }

    operator VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV *>( this );
    }

    operator VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over the members; bit-fields cannot be bound by reference, but std::tie
    // here returns const references to the values as exposed by the declared types.
    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( geometryIndex, reserved, geometryFlags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & ) const = default;
#else
    // Pre-C++20 fallback: memberwise equality.
    bool operator==( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( geometryIndex == rhs.geometryIndex ) && ( reserved == rhs.reserved ) && ( geometryFlags == rhs.geometryFlags );
#  endif
    }

    bool operator!=( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t geometryIndex : 24;
    uint32_t reserved      : 5;
    uint32_t geometryFlags : 3;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper (used by generic code, C++20 and up).
  template <>
  struct CppType<VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV>
  {
    using Type = ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV;
  };
#endif

  // wrapper struct for struct VkClusterAccelerationStructureBuildTriangleClusterInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureBuildTriangleClusterInfoNV.html
  struct ClusterAccelerationStructureBuildTriangleClusterInfoNV
  {
    using NativeType = VkClusterAccelerationStructureBuildTriangleClusterInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildTriangleClusterInfoNV(
      uint32_t                                                    clusterID_                         = {},
      ClusterAccelerationStructureClusterFlagsNV                  clusterFlags_                      = {},
      uint32_t                                                    triangleCount_                     = {},
      uint32_t                                                    vertexCount_                       = {},
      uint32_t                                                    positionTruncateBitCount_          = {},
      uint32_t                                                    indexType_                         = {},
      uint32_t                                                    opacityMicromapIndexType_          = {},
      ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV baseGeometryIndexAndGeometryFlags_ = {},
      uint16_t                                                    indexBufferStride_                 = {},
      uint16_t                                                    vertexBufferStride_                = {},
      uint16_t                                                    geometryIndexAndFlagsBufferStride_ = {},
      uint16_t                                                    opacityMicromapIndexBufferStride_  = {},
      DeviceAddress                                               indexBuffer_                       = {},
      DeviceAddress                                               vertexBuffer_                      = {},
      DeviceAddress                                               geometryIndexAndFlagsBuffer_       = {},
      DeviceAddress                                               opacityMicromapArray_              = {},
      DeviceAddress                                               opacityMicromapIndexBuffer_        = {} ) VULKAN_HPP_NOEXCEPT
      : clusterID{ clusterID_ }
      , clusterFlags{ clusterFlags_ }
      , triangleCount{ triangleCount_ }
      , vertexCount{ vertexCount_ }
      , positionTruncateBitCount{ positionTruncateBitCount_ }
      , indexType{ indexType_ }
      , opacityMicromapIndexType{ opacityMicromapIndexType_ }
      , baseGeometryIndexAndGeometryFlags{ baseGeometryIndexAndGeometryFlags_ }
      , indexBufferStride{ indexBufferStride_ }
      , vertexBufferStride{ vertexBufferStride_ }
      , geometryIndexAndFlagsBufferStride{ geometryIndexAndFlagsBufferStride_ }
      , opacityMicromapIndexBufferStride{ opacityMicromapIndexBufferStride_ }
      , indexBuffer{ indexBuffer_ }
      , vertexBuffer{ vertexBuffer_ }
      , geometryIndexAndFlagsBuffer{ geometryIndexAndFlagsBuffer_ }
      , opacityMicromapArray{ opacityMicromapArray_ }
      , opacityMicromapIndexBuffer{ opacityMicromapIndexBuffer_ }
    {
    }

    // Defaulted copy constructor; all members are trivially copyable.
    VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildTriangleClusterInfoNV( ClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct. The reinterpret_cast relies on this wrapper
    // having exactly the same layout as VkClusterAccelerationStructureBuildTriangleClusterInfoNV.
    ClusterAccelerationStructureBuildTriangleClusterInfoNV( VkClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ClusterAccelerationStructureBuildTriangleClusterInfoNV( *reinterpret_cast<ClusterAccelerationStructureBuildTriangleClusterInfoNV const *>( &rhs ) )
    {
    }

    ClusterAccelerationStructureBuildTriangleClusterInfoNV &
      operator=( ClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (always available, even with VULKAN_HPP_NO_CONSTRUCTORS);
    // same layout-compatibility assumption as the converting constructor above.
    ClusterAccelerationStructureBuildTriangleClusterInfoNV &
      operator=( VkClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ClusterAccelerationStructureBuildTriangleClusterInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this so calls can be fluently chained.
    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setClusterID( uint32_t clusterID_ ) VULKAN_HPP_NOEXCEPT
    {
      clusterID = clusterID_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV &
      setClusterFlags( ClusterAccelerationStructureClusterFlagsNV clusterFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      clusterFlags = clusterFlags_;
      return *this;
    }

    // NOTE: triangleCount is stored in a 9-bit bitfield (see the member declarations below);
    // bits above the field width are silently truncated.
    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setTriangleCount( uint32_t triangleCount_ ) VULKAN_HPP_NOEXCEPT
    {
      triangleCount = triangleCount_;
      return *this;
    }

    // NOTE: vertexCount is stored in a 9-bit bitfield; high bits are truncated.
    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexCount = vertexCount_;
      return *this;
    }

    // NOTE: positionTruncateBitCount is stored in a 6-bit bitfield; high bits are truncated.
    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV &
      setPositionTruncateBitCount( uint32_t positionTruncateBitCount_ ) VULKAN_HPP_NOEXCEPT
    {
      positionTruncateBitCount = positionTruncateBitCount_;
      return *this;
    }

    // NOTE: indexType is stored in a 4-bit bitfield; high bits are truncated.
    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setIndexType( uint32_t indexType_ ) VULKAN_HPP_NOEXCEPT
    {
      indexType = indexType_;
      return *this;
    }

    // NOTE: opacityMicromapIndexType is stored in a 4-bit bitfield; high bits are truncated.
    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV &
      setOpacityMicromapIndexType( uint32_t opacityMicromapIndexType_ ) VULKAN_HPP_NOEXCEPT
    {
      opacityMicromapIndexType = opacityMicromapIndexType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setBaseGeometryIndexAndGeometryFlags(
      ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & baseGeometryIndexAndGeometryFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      baseGeometryIndexAndGeometryFlags = baseGeometryIndexAndGeometryFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setIndexBufferStride( uint16_t indexBufferStride_ ) VULKAN_HPP_NOEXCEPT
    {
      indexBufferStride = indexBufferStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setVertexBufferStride( uint16_t vertexBufferStride_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexBufferStride = vertexBufferStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV &
      setGeometryIndexAndFlagsBufferStride( uint16_t geometryIndexAndFlagsBufferStride_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryIndexAndFlagsBufferStride = geometryIndexAndFlagsBufferStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV &
      setOpacityMicromapIndexBufferStride( uint16_t opacityMicromapIndexBufferStride_ ) VULKAN_HPP_NOEXCEPT
    {
      opacityMicromapIndexBufferStride = opacityMicromapIndexBufferStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setIndexBuffer( DeviceAddress indexBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      indexBuffer = indexBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setVertexBuffer( DeviceAddress vertexBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexBuffer = vertexBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV &
      setGeometryIndexAndFlagsBuffer( DeviceAddress geometryIndexAndFlagsBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryIndexAndFlagsBuffer = geometryIndexAndFlagsBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV &
      setOpacityMicromapArray( DeviceAddress opacityMicromapArray_ ) VULKAN_HPP_NOEXCEPT
    {
      opacityMicromapArray = opacityMicromapArray_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV &
      setOpacityMicromapIndexBuffer( DeviceAddress opacityMicromapIndexBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      opacityMicromapIndexBuffer = opacityMicromapIndexBuffer_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer
    // (const and non-const). All rely on the wrapper sharing the C struct's layout.
    operator VkClusterAccelerationStructureBuildTriangleClusterInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkClusterAccelerationStructureBuildTriangleClusterInfoNV *>( this );
    }

    operator VkClusterAccelerationStructureBuildTriangleClusterInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkClusterAccelerationStructureBuildTriangleClusterInfoNV *>( this );
    }

    operator VkClusterAccelerationStructureBuildTriangleClusterInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkClusterAccelerationStructureBuildTriangleClusterInfoNV *>( this );
    }

    operator VkClusterAccelerationStructureBuildTriangleClusterInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkClusterAccelerationStructureBuildTriangleClusterInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to every member, in declaration order;
    // used by the reflection-based operator== below. The tuple's element list must
    // stay in sync with the member declarations.
    std::tuple<uint32_t const &,
               ClusterAccelerationStructureClusterFlagsNV const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const &,
               uint16_t const &,
               uint16_t const &,
               uint16_t const &,
               uint16_t const &,
               DeviceAddress const &,
               DeviceAddress const &,
               DeviceAddress const &,
               DeviceAddress const &,
               DeviceAddress const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( clusterID,
                       clusterFlags,
                       triangleCount,
                       vertexCount,
                       positionTruncateBitCount,
                       indexType,
                       opacityMicromapIndexType,
                       baseGeometryIndexAndGeometryFlags,
                       indexBufferStride,
                       vertexBufferStride,
                       geometryIndexAndFlagsBufferStride,
                       opacityMicromapIndexBufferStride,
                       indexBuffer,
                       vertexBuffer,
                       geometryIndexAndFlagsBuffer,
                       opacityMicromapArray,
                       opacityMicromapIndexBuffer );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20 path: defaulted three-way comparison generates all comparison operators.
    auto operator<=>( ClusterAccelerationStructureBuildTriangleClusterInfoNV const & ) const = default;
#else
    // Pre-C++20 path: memberwise equality, either via reflect() or spelled out explicitly.
    bool operator==( ClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( clusterID == rhs.clusterID ) && ( clusterFlags == rhs.clusterFlags ) && ( triangleCount == rhs.triangleCount ) &&
             ( vertexCount == rhs.vertexCount ) && ( positionTruncateBitCount == rhs.positionTruncateBitCount ) && ( indexType == rhs.indexType ) &&
             ( opacityMicromapIndexType == rhs.opacityMicromapIndexType ) && ( baseGeometryIndexAndGeometryFlags == rhs.baseGeometryIndexAndGeometryFlags ) &&
             ( indexBufferStride == rhs.indexBufferStride ) && ( vertexBufferStride == rhs.vertexBufferStride ) &&
             ( geometryIndexAndFlagsBufferStride == rhs.geometryIndexAndFlagsBufferStride ) &&
             ( opacityMicromapIndexBufferStride == rhs.opacityMicromapIndexBufferStride ) && ( indexBuffer == rhs.indexBuffer ) &&
             ( vertexBuffer == rhs.vertexBuffer ) && ( geometryIndexAndFlagsBuffer == rhs.geometryIndexAndFlagsBuffer ) &&
             ( opacityMicromapArray == rhs.opacityMicromapArray ) && ( opacityMicromapIndexBuffer == rhs.opacityMicromapIndexBuffer );
#  endif
    }

    // Inequality, expressed as the negation of the equality comparison above.
    bool operator!=( ClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    uint32_t                                                    clusterID    = {};
    ClusterAccelerationStructureClusterFlagsNV                  clusterFlags = {};
    // Packed bitfields sharing one uint32_t: 9 + 9 + 6 + 4 + 4 = 32 bits.
    // Bitfields cannot take `= {}` default member initializers pre-C++20, so they are
    // initialized by the value constructor instead.
    uint32_t                                                    triangleCount            : 9;
    uint32_t                                                    vertexCount              : 9;
    uint32_t                                                    positionTruncateBitCount : 6;
    uint32_t                                                    indexType                : 4;
    uint32_t                                                    opacityMicromapIndexType : 4;
    ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV baseGeometryIndexAndGeometryFlags = {};
    uint16_t                                                    indexBufferStride                 = {};
    uint16_t                                                    vertexBufferStride                = {};
    uint16_t                                                    geometryIndexAndFlagsBufferStride = {};
    uint16_t                                                    opacityMicromapIndexBufferStride  = {};
    DeviceAddress                                               indexBuffer                       = {};
    DeviceAddress                                               vertexBuffer                      = {};
    DeviceAddress                                               geometryIndexAndFlagsBuffer       = {};
    DeviceAddress                                               opacityMicromapArray              = {};
    DeviceAddress                                               opacityMicromapIndexBuffer        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkClusterAccelerationStructureBuildTriangleClusterInfoNV>
  {
    using Type = ClusterAccelerationStructureBuildTriangleClusterInfoNV;
  };
#endif

  // wrapper struct for struct VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.html
  struct ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV
  {
    using NativeType = VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: every parameter is defaulted, so this also serves as the
    // default constructor, including the packed bitfield members.
    VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV(
      uint32_t                                                    clusterID_                         = {},
      ClusterAccelerationStructureClusterFlagsNV                  clusterFlags_                      = {},
      uint32_t                                                    triangleCount_                     = {},
      uint32_t                                                    vertexCount_                       = {},
      uint32_t                                                    positionTruncateBitCount_          = {},
      uint32_t                                                    indexType_                         = {},
      uint32_t                                                    opacityMicromapIndexType_          = {},
      ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV baseGeometryIndexAndGeometryFlags_ = {},
      uint16_t                                                    indexBufferStride_                 = {},
      uint16_t                                                    vertexBufferStride_                = {},
      uint16_t                                                    geometryIndexAndFlagsBufferStride_ = {},
      uint16_t                                                    opacityMicromapIndexBufferStride_  = {},
      DeviceAddress                                               indexBuffer_                       = {},
      DeviceAddress                                               vertexBuffer_                      = {},
      DeviceAddress                                               geometryIndexAndFlagsBuffer_       = {},
      DeviceAddress                                               opacityMicromapArray_              = {},
      DeviceAddress                                               opacityMicromapIndexBuffer_        = {},
      DeviceAddress                                               instantiationBoundingBoxLimit_     = {} ) VULKAN_HPP_NOEXCEPT
      : clusterID{ clusterID_ }
      , clusterFlags{ clusterFlags_ }
      , triangleCount{ triangleCount_ }
      , vertexCount{ vertexCount_ }
      , positionTruncateBitCount{ positionTruncateBitCount_ }
      , indexType{ indexType_ }
      , opacityMicromapIndexType{ opacityMicromapIndexType_ }
      , baseGeometryIndexAndGeometryFlags{ baseGeometryIndexAndGeometryFlags_ }
      , indexBufferStride{ indexBufferStride_ }
      , vertexBufferStride{ vertexBufferStride_ }
      , geometryIndexAndFlagsBufferStride{ geometryIndexAndFlagsBufferStride_ }
      , opacityMicromapIndexBufferStride{ opacityMicromapIndexBufferStride_ }
      , indexBuffer{ indexBuffer_ }
      , vertexBuffer{ vertexBuffer_ }
      , geometryIndexAndFlagsBuffer{ geometryIndexAndFlagsBuffer_ }
      , opacityMicromapArray{ opacityMicromapArray_ }
      , opacityMicromapIndexBuffer{ opacityMicromapIndexBuffer_ }
      , instantiationBoundingBoxLimit{ instantiationBoundingBoxLimit_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV(
      ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper sharing its layout.
    ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV( VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV(
          *reinterpret_cast<ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const *>( &rhs ) )
    {
    }

    // Extend a ClusterAccelerationStructureBuildTriangleClusterInfoNV with the
    // template-only instantiationBoundingBoxLimit member, copying all shared members.
    // Marked VULKAN_HPP_NOEXCEPT for consistency with the other constructors: it only
    // copies trivially copyable members and cannot throw.
    explicit ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV(
      ClusterAccelerationStructureBuildTriangleClusterInfoNV const & clusterAccelerationStructureBuildTriangleClusterInfoNV,
      DeviceAddress                                                  instantiationBoundingBoxLimit_ = {} ) VULKAN_HPP_NOEXCEPT
      : clusterID( clusterAccelerationStructureBuildTriangleClusterInfoNV.clusterID )
      , clusterFlags( clusterAccelerationStructureBuildTriangleClusterInfoNV.clusterFlags )
      , triangleCount( clusterAccelerationStructureBuildTriangleClusterInfoNV.triangleCount )
      , vertexCount( clusterAccelerationStructureBuildTriangleClusterInfoNV.vertexCount )
      , positionTruncateBitCount( clusterAccelerationStructureBuildTriangleClusterInfoNV.positionTruncateBitCount )
      , indexType( clusterAccelerationStructureBuildTriangleClusterInfoNV.indexType )
      , opacityMicromapIndexType( clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapIndexType )
      , baseGeometryIndexAndGeometryFlags( clusterAccelerationStructureBuildTriangleClusterInfoNV.baseGeometryIndexAndGeometryFlags )
      , indexBufferStride( clusterAccelerationStructureBuildTriangleClusterInfoNV.indexBufferStride )
      , vertexBufferStride( clusterAccelerationStructureBuildTriangleClusterInfoNV.vertexBufferStride )
      , geometryIndexAndFlagsBufferStride( clusterAccelerationStructureBuildTriangleClusterInfoNV.geometryIndexAndFlagsBufferStride )
      , opacityMicromapIndexBufferStride( clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapIndexBufferStride )
      , indexBuffer( clusterAccelerationStructureBuildTriangleClusterInfoNV.indexBuffer )
      , vertexBuffer( clusterAccelerationStructureBuildTriangleClusterInfoNV.vertexBuffer )
      , geometryIndexAndFlagsBuffer( clusterAccelerationStructureBuildTriangleClusterInfoNV.geometryIndexAndFlagsBuffer )
      , opacityMicromapArray( clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapArray )
      , opacityMicromapIndexBuffer( clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapIndexBuffer )
      , instantiationBoundingBoxLimit( instantiationBoundingBoxLimit_ )
    {
    }

    ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV &
      operator=( ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (always available, even with VULKAN_HPP_NO_CONSTRUCTORS).
    ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV &
      operator=( VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this. Setters for the
    // bitfield members (triangleCount:9, vertexCount:9, positionTruncateBitCount:6,
    // indexType:4, opacityMicromapIndexType:4) silently truncate high bits.
    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setClusterID( uint32_t clusterID_ ) VULKAN_HPP_NOEXCEPT
    {
      clusterID = clusterID_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV &
      setClusterFlags( ClusterAccelerationStructureClusterFlagsNV clusterFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      clusterFlags = clusterFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setTriangleCount( uint32_t triangleCount_ ) VULKAN_HPP_NOEXCEPT
    {
      triangleCount = triangleCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexCount = vertexCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV &
      setPositionTruncateBitCount( uint32_t positionTruncateBitCount_ ) VULKAN_HPP_NOEXCEPT
    {
      positionTruncateBitCount = positionTruncateBitCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setIndexType( uint32_t indexType_ ) VULKAN_HPP_NOEXCEPT
    {
      indexType = indexType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV &
      setOpacityMicromapIndexType( uint32_t opacityMicromapIndexType_ ) VULKAN_HPP_NOEXCEPT
    {
      opacityMicromapIndexType = opacityMicromapIndexType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setBaseGeometryIndexAndGeometryFlags(
      ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & baseGeometryIndexAndGeometryFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      baseGeometryIndexAndGeometryFlags = baseGeometryIndexAndGeometryFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV &
      setIndexBufferStride( uint16_t indexBufferStride_ ) VULKAN_HPP_NOEXCEPT
    {
      indexBufferStride = indexBufferStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV &
      setVertexBufferStride( uint16_t vertexBufferStride_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexBufferStride = vertexBufferStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV &
      setGeometryIndexAndFlagsBufferStride( uint16_t geometryIndexAndFlagsBufferStride_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryIndexAndFlagsBufferStride = geometryIndexAndFlagsBufferStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV &
      setOpacityMicromapIndexBufferStride( uint16_t opacityMicromapIndexBufferStride_ ) VULKAN_HPP_NOEXCEPT
    {
      opacityMicromapIndexBufferStride = opacityMicromapIndexBufferStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setIndexBuffer( DeviceAddress indexBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      indexBuffer = indexBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setVertexBuffer( DeviceAddress vertexBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexBuffer = vertexBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV &
      setGeometryIndexAndFlagsBuffer( DeviceAddress geometryIndexAndFlagsBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryIndexAndFlagsBuffer = geometryIndexAndFlagsBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV &
      setOpacityMicromapArray( DeviceAddress opacityMicromapArray_ ) VULKAN_HPP_NOEXCEPT
    {
      opacityMicromapArray = opacityMicromapArray_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV &
      setOpacityMicromapIndexBuffer( DeviceAddress opacityMicromapIndexBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      opacityMicromapIndexBuffer = opacityMicromapIndexBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV &
      setInstantiationBoundingBoxLimit( DeviceAddress instantiationBoundingBoxLimit_ ) VULKAN_HPP_NOEXCEPT
    {
      instantiationBoundingBoxLimit = instantiationBoundingBoxLimit_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (reference and pointer forms);
    // rely on the wrapper sharing the C struct's layout.
    operator VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV *>( this );
    }

    operator VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV *>( this );
    }

    operator VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV *>( this );
    }

    operator VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to every member, in declaration order; used by the
    // reflection-based operator== below.
    std::tuple<uint32_t const &,
               ClusterAccelerationStructureClusterFlagsNV const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const &,
               uint16_t const &,
               uint16_t const &,
               uint16_t const &,
               uint16_t const &,
               DeviceAddress const &,
               DeviceAddress const &,
               DeviceAddress const &,
               DeviceAddress const &,
               DeviceAddress const &,
               DeviceAddress const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( clusterID,
                       clusterFlags,
                       triangleCount,
                       vertexCount,
                       positionTruncateBitCount,
                       indexType,
                       opacityMicromapIndexType,
                       baseGeometryIndexAndGeometryFlags,
                       indexBufferStride,
                       vertexBufferStride,
                       geometryIndexAndFlagsBufferStride,
                       opacityMicromapIndexBufferStride,
                       indexBuffer,
                       vertexBuffer,
                       geometryIndexAndFlagsBuffer,
                       opacityMicromapArray,
                       opacityMicromapIndexBuffer,
                       instantiationBoundingBoxLimit );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20 path: defaulted three-way comparison generates all comparison operators.
    auto operator<=>( ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & ) const = default;
#else
    // Pre-C++20 path: memberwise equality, via reflect() or spelled out explicitly.
    bool operator==( ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( clusterID == rhs.clusterID ) && ( clusterFlags == rhs.clusterFlags ) && ( triangleCount == rhs.triangleCount ) &&
             ( vertexCount == rhs.vertexCount ) && ( positionTruncateBitCount == rhs.positionTruncateBitCount ) && ( indexType == rhs.indexType ) &&
             ( opacityMicromapIndexType == rhs.opacityMicromapIndexType ) && ( baseGeometryIndexAndGeometryFlags == rhs.baseGeometryIndexAndGeometryFlags ) &&
             ( indexBufferStride == rhs.indexBufferStride ) && ( vertexBufferStride == rhs.vertexBufferStride ) &&
             ( geometryIndexAndFlagsBufferStride == rhs.geometryIndexAndFlagsBufferStride ) &&
             ( opacityMicromapIndexBufferStride == rhs.opacityMicromapIndexBufferStride ) && ( indexBuffer == rhs.indexBuffer ) &&
             ( vertexBuffer == rhs.vertexBuffer ) && ( geometryIndexAndFlagsBuffer == rhs.geometryIndexAndFlagsBuffer ) &&
             ( opacityMicromapArray == rhs.opacityMicromapArray ) && ( opacityMicromapIndexBuffer == rhs.opacityMicromapIndexBuffer ) &&
             ( instantiationBoundingBoxLimit == rhs.instantiationBoundingBoxLimit );
#  endif
    }

    bool operator!=( ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t                                                    clusterID    = {};
    ClusterAccelerationStructureClusterFlagsNV                  clusterFlags = {};
    // Packed bitfields sharing one uint32_t: 9 + 9 + 6 + 4 + 4 = 32 bits; initialized
    // by the value constructor (bitfields cannot take default member initializers pre-C++20).
    uint32_t                                                    triangleCount            : 9;
    uint32_t                                                    vertexCount              : 9;
    uint32_t                                                    positionTruncateBitCount : 6;
    uint32_t                                                    indexType                : 4;
    uint32_t                                                    opacityMicromapIndexType : 4;
    ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV baseGeometryIndexAndGeometryFlags = {};
    uint16_t                                                    indexBufferStride                 = {};
    uint16_t                                                    vertexBufferStride                = {};
    uint16_t                                                    geometryIndexAndFlagsBufferStride = {};
    uint16_t                                                    opacityMicromapIndexBufferStride  = {};
    DeviceAddress                                               indexBuffer                       = {};
    DeviceAddress                                               vertexBuffer                      = {};
    DeviceAddress                                               geometryIndexAndFlagsBuffer       = {};
    DeviceAddress                                               opacityMicromapArray              = {};
    DeviceAddress                                               opacityMicromapIndexBuffer        = {};
    DeviceAddress                                               instantiationBoundingBoxLimit     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV>
  {
    using Type = ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV;
  };
#endif

  // wrapper struct for struct VkClusterAccelerationStructureClustersBottomLevelInputNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureClustersBottomLevelInputNV.html
  struct ClusterAccelerationStructureClustersBottomLevelInputNV
  {
    using NativeType = VkClusterAccelerationStructureClustersBottomLevelInputNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eClusterAccelerationStructureClustersBottomLevelInputNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer and pNext defaults to nullptr.
    VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureClustersBottomLevelInputNV( uint32_t maxTotalClusterCount_                    = {},
                                                                                 uint32_t maxClusterCountPerAccelerationStructure_ = {},
                                                                                 void *   pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxTotalClusterCount{ maxTotalClusterCount_ }
      , maxClusterCountPerAccelerationStructure{ maxClusterCountPerAccelerationStructure_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureClustersBottomLevelInputNV( ClusterAccelerationStructureClustersBottomLevelInputNV const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper sharing its layout.
    ClusterAccelerationStructureClustersBottomLevelInputNV( VkClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ClusterAccelerationStructureClustersBottomLevelInputNV( *reinterpret_cast<ClusterAccelerationStructureClustersBottomLevelInputNV const *>( &rhs ) )
    {
    }

    ClusterAccelerationStructureClustersBottomLevelInputNV &
      operator=( ClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (always available, even with VULKAN_HPP_NO_CONSTRUCTORS).
    ClusterAccelerationStructureClustersBottomLevelInputNV &
      operator=( VkClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ClusterAccelerationStructureClustersBottomLevelInputNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureClustersBottomLevelInputNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureClustersBottomLevelInputNV &
      setMaxTotalClusterCount( uint32_t maxTotalClusterCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxTotalClusterCount = maxTotalClusterCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureClustersBottomLevelInputNV &
      setMaxClusterCountPerAccelerationStructure( uint32_t maxClusterCountPerAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
    {
      maxClusterCountPerAccelerationStructure = maxClusterCountPerAccelerationStructure_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vk struct (by reference and by pointer),
    // so the wrapper can be passed directly to the C API.
    operator VkClusterAccelerationStructureClustersBottomLevelInputNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkClusterAccelerationStructureClustersBottomLevelInputNV *>( this );
    }

    operator VkClusterAccelerationStructureClustersBottomLevelInputNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkClusterAccelerationStructureClustersBottomLevelInputNV *>( this );
    }

    operator VkClusterAccelerationStructureClustersBottomLevelInputNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkClusterAccelerationStructureClustersBottomLevelInputNV *>( this );
    }

    operator VkClusterAccelerationStructureClustersBottomLevelInputNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkClusterAccelerationStructureClustersBottomLevelInputNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; also reused below for tuple-based equality.
    std::tuple<StructureType const &, void * const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxTotalClusterCount, maxClusterCountPerAccelerationStructure );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ClusterAccelerationStructureClustersBottomLevelInputNV const & ) const = default;
#else
    // Memberwise equality fallback when defaulted <=> is unavailable.
    bool operator==( ClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxTotalClusterCount == rhs.maxTotalClusterCount ) &&
             ( maxClusterCountPerAccelerationStructure == rhs.maxClusterCountPerAccelerationStructure );
#  endif
    }

    bool operator!=( ClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native struct; sType is fixed, pNext allows Vulkan structure chaining.
    StructureType sType                                   = StructureType::eClusterAccelerationStructureClustersBottomLevelInputNV;
    void *        pNext                                   = {};
    uint32_t      maxTotalClusterCount                    = {};
    uint32_t      maxClusterCountPerAccelerationStructure = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vk type to its C++ wrapper type.
  template <>
  struct CppType<VkClusterAccelerationStructureClustersBottomLevelInputNV>
  {
    using Type = ClusterAccelerationStructureClustersBottomLevelInputNV;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eClusterAccelerationStructureClustersBottomLevelInputNV>
  {
    using Type = ClusterAccelerationStructureClustersBottomLevelInputNV;
  };

  // wrapper struct for struct VkClusterAccelerationStructureTriangleClusterInputNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureTriangleClusterInputNV.html
  struct ClusterAccelerationStructureTriangleClusterInputNV
  {
    using NativeType = VkClusterAccelerationStructureTriangleClusterInputNV;

    // Metadata consumed by vulkan.hpp's structure-chain machinery; structureType matches the sType member below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eClusterAccelerationStructureTriangleClusterInputNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: every parameter is defaulted, so any prefix of the members may be supplied.
    VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureTriangleClusterInputNV( Format   vertexFormat_                  = Format::eUndefined,
                                                                             uint32_t maxGeometryIndexValue_         = {},
                                                                             uint32_t maxClusterUniqueGeometryCount_ = {},
                                                                             uint32_t maxClusterTriangleCount_       = {},
                                                                             uint32_t maxClusterVertexCount_         = {},
                                                                             uint32_t maxTotalTriangleCount_         = {},
                                                                             uint32_t maxTotalVertexCount_           = {},
                                                                             uint32_t minPositionTruncateBitCount_   = {},
                                                                             void *   pNext_                         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , vertexFormat{ vertexFormat_ }
      , maxGeometryIndexValue{ maxGeometryIndexValue_ }
      , maxClusterUniqueGeometryCount{ maxClusterUniqueGeometryCount_ }
      , maxClusterTriangleCount{ maxClusterTriangleCount_ }
      , maxClusterVertexCount{ maxClusterVertexCount_ }
      , maxTotalTriangleCount{ maxTotalTriangleCount_ }
      , maxTotalVertexCount{ maxTotalVertexCount_ }
      , minPositionTruncateBitCount{ minPositionTruncateBitCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      ClusterAccelerationStructureTriangleClusterInputNV( ClusterAccelerationStructureTriangleClusterInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vk struct: the wrapper mirrors its layout, so reinterpreting the reference is sufficient.
    ClusterAccelerationStructureTriangleClusterInputNV( VkClusterAccelerationStructureTriangleClusterInputNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ClusterAccelerationStructureTriangleClusterInputNV( *reinterpret_cast<ClusterAccelerationStructureTriangleClusterInputNV const *>( &rhs ) )
    {
    }

    ClusterAccelerationStructureTriangleClusterInputNV &
      operator=( ClusterAccelerationStructureTriangleClusterInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native Vk struct; available even when the wrapper constructors are compiled out.
    ClusterAccelerationStructureTriangleClusterInputNV & operator=( VkClusterAccelerationStructureTriangleClusterInputNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ClusterAccelerationStructureTriangleClusterInputNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns a single member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setVertexFormat( Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexFormat = vertexFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMaxGeometryIndexValue( uint32_t maxGeometryIndexValue_ ) VULKAN_HPP_NOEXCEPT
    {
      maxGeometryIndexValue = maxGeometryIndexValue_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV &
      setMaxClusterUniqueGeometryCount( uint32_t maxClusterUniqueGeometryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxClusterUniqueGeometryCount = maxClusterUniqueGeometryCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV &
      setMaxClusterTriangleCount( uint32_t maxClusterTriangleCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxClusterTriangleCount = maxClusterTriangleCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMaxClusterVertexCount( uint32_t maxClusterVertexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxClusterVertexCount = maxClusterVertexCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMaxTotalTriangleCount( uint32_t maxTotalTriangleCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxTotalTriangleCount = maxTotalTriangleCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMaxTotalVertexCount( uint32_t maxTotalVertexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxTotalVertexCount = maxTotalVertexCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV &
      setMinPositionTruncateBitCount( uint32_t minPositionTruncateBitCount_ ) VULKAN_HPP_NOEXCEPT
    {
      minPositionTruncateBitCount = minPositionTruncateBitCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vk struct (by reference and by pointer),
    // so the wrapper can be passed directly to the C API.
    operator VkClusterAccelerationStructureTriangleClusterInputNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkClusterAccelerationStructureTriangleClusterInputNV *>( this );
    }

    operator VkClusterAccelerationStructureTriangleClusterInputNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkClusterAccelerationStructureTriangleClusterInputNV *>( this );
    }

    operator VkClusterAccelerationStructureTriangleClusterInputNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkClusterAccelerationStructureTriangleClusterInputNV *>( this );
    }

    operator VkClusterAccelerationStructureTriangleClusterInputNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkClusterAccelerationStructureTriangleClusterInputNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; also reused below for tuple-based equality.
    std::tuple<StructureType const &,
               void * const &,
               Format const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       vertexFormat,
                       maxGeometryIndexValue,
                       maxClusterUniqueGeometryCount,
                       maxClusterTriangleCount,
                       maxClusterVertexCount,
                       maxTotalTriangleCount,
                       maxTotalVertexCount,
                       minPositionTruncateBitCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ClusterAccelerationStructureTriangleClusterInputNV const & ) const = default;
#else
    // Memberwise equality fallback when defaulted <=> is unavailable.
    bool operator==( ClusterAccelerationStructureTriangleClusterInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexFormat == rhs.vertexFormat ) &&
             ( maxGeometryIndexValue == rhs.maxGeometryIndexValue ) && ( maxClusterUniqueGeometryCount == rhs.maxClusterUniqueGeometryCount ) &&
             ( maxClusterTriangleCount == rhs.maxClusterTriangleCount ) && ( maxClusterVertexCount == rhs.maxClusterVertexCount ) &&
             ( maxTotalTriangleCount == rhs.maxTotalTriangleCount ) && ( maxTotalVertexCount == rhs.maxTotalVertexCount ) &&
             ( minPositionTruncateBitCount == rhs.minPositionTruncateBitCount );
#  endif
    }

    bool operator!=( ClusterAccelerationStructureTriangleClusterInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native struct; sType is fixed, pNext allows Vulkan structure chaining.
    StructureType sType                         = StructureType::eClusterAccelerationStructureTriangleClusterInputNV;
    void *        pNext                         = {};
    Format        vertexFormat                  = Format::eUndefined;
    uint32_t      maxGeometryIndexValue         = {};
    uint32_t      maxClusterUniqueGeometryCount = {};
    uint32_t      maxClusterTriangleCount       = {};
    uint32_t      maxClusterVertexCount         = {};
    uint32_t      maxTotalTriangleCount         = {};
    uint32_t      maxTotalVertexCount           = {};
    uint32_t      minPositionTruncateBitCount   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vk type to its C++ wrapper type.
  template <>
  struct CppType<VkClusterAccelerationStructureTriangleClusterInputNV>
  {
    using Type = ClusterAccelerationStructureTriangleClusterInputNV;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eClusterAccelerationStructureTriangleClusterInputNV>
  {
    using Type = ClusterAccelerationStructureTriangleClusterInputNV;
  };

  // wrapper struct for struct VkClusterAccelerationStructureMoveObjectsInputNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureMoveObjectsInputNV.html
  struct ClusterAccelerationStructureMoveObjectsInputNV
  {
    using NativeType = VkClusterAccelerationStructureMoveObjectsInputNV;

    // Metadata consumed by vulkan.hpp's structure-chain machinery; structureType matches the sType member below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eClusterAccelerationStructureMoveObjectsInputNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: every parameter is defaulted, so any prefix of the members may be supplied.
    VULKAN_HPP_CONSTEXPR
      ClusterAccelerationStructureMoveObjectsInputNV( ClusterAccelerationStructureTypeNV type_ = ClusterAccelerationStructureTypeNV::eClustersBottomLevel,
                                                      Bool32                             noMoveOverlap_ = {},
                                                      DeviceSize                         maxMovedBytes_ = {},
                                                      void *                             pNext_         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , type{ type_ }
      , noMoveOverlap{ noMoveOverlap_ }
      , maxMovedBytes{ maxMovedBytes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      ClusterAccelerationStructureMoveObjectsInputNV( ClusterAccelerationStructureMoveObjectsInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vk struct: the wrapper mirrors its layout, so reinterpreting the reference is sufficient.
    ClusterAccelerationStructureMoveObjectsInputNV( VkClusterAccelerationStructureMoveObjectsInputNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ClusterAccelerationStructureMoveObjectsInputNV( *reinterpret_cast<ClusterAccelerationStructureMoveObjectsInputNV const *>( &rhs ) )
    {
    }

    ClusterAccelerationStructureMoveObjectsInputNV & operator=( ClusterAccelerationStructureMoveObjectsInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native Vk struct; available even when the wrapper constructors are compiled out.
    ClusterAccelerationStructureMoveObjectsInputNV & operator=( VkClusterAccelerationStructureMoveObjectsInputNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ClusterAccelerationStructureMoveObjectsInputNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns a single member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInputNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInputNV & setType( ClusterAccelerationStructureTypeNV type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInputNV & setNoMoveOverlap( Bool32 noMoveOverlap_ ) VULKAN_HPP_NOEXCEPT
    {
      noMoveOverlap = noMoveOverlap_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInputNV & setMaxMovedBytes( DeviceSize maxMovedBytes_ ) VULKAN_HPP_NOEXCEPT
    {
      maxMovedBytes = maxMovedBytes_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vk struct (by reference and by pointer),
    // so the wrapper can be passed directly to the C API.
    operator VkClusterAccelerationStructureMoveObjectsInputNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkClusterAccelerationStructureMoveObjectsInputNV *>( this );
    }

    operator VkClusterAccelerationStructureMoveObjectsInputNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkClusterAccelerationStructureMoveObjectsInputNV *>( this );
    }

    operator VkClusterAccelerationStructureMoveObjectsInputNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkClusterAccelerationStructureMoveObjectsInputNV *>( this );
    }

    operator VkClusterAccelerationStructureMoveObjectsInputNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkClusterAccelerationStructureMoveObjectsInputNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; also reused below for tuple-based equality.
    std::tuple<StructureType const &, void * const &, ClusterAccelerationStructureTypeNV const &, Bool32 const &, DeviceSize const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, type, noMoveOverlap, maxMovedBytes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ClusterAccelerationStructureMoveObjectsInputNV const & ) const = default;
#else
    // Memberwise equality fallback when defaulted <=> is unavailable.
    bool operator==( ClusterAccelerationStructureMoveObjectsInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( noMoveOverlap == rhs.noMoveOverlap ) &&
             ( maxMovedBytes == rhs.maxMovedBytes );
#  endif
    }

    bool operator!=( ClusterAccelerationStructureMoveObjectsInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native struct; sType is fixed, pNext allows Vulkan structure chaining.
    StructureType                      sType         = StructureType::eClusterAccelerationStructureMoveObjectsInputNV;
    void *                             pNext         = {};
    ClusterAccelerationStructureTypeNV type          = ClusterAccelerationStructureTypeNV::eClustersBottomLevel;
    Bool32                             noMoveOverlap = {};
    DeviceSize                         maxMovedBytes = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vk type to its C++ wrapper type.
  template <>
  struct CppType<VkClusterAccelerationStructureMoveObjectsInputNV>
  {
    using Type = ClusterAccelerationStructureMoveObjectsInputNV;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eClusterAccelerationStructureMoveObjectsInputNV>
  {
    using Type = ClusterAccelerationStructureMoveObjectsInputNV;
  };

  // Union of pointers to the three per-op-type input structs. Only one member is active at a time;
  // NOTE(review): the active member is presumably selected by the accompanying
  // ClusterAccelerationStructureInputInfoNV::opType — confirm against the VK_NV_cluster_acceleration_structure spec.
  union ClusterAccelerationStructureOpInputNV
  {
    using NativeType = VkClusterAccelerationStructureOpInputNV;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )

    // One converting constructor per union alternative; the first doubles as the default constructor.
    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV( ClusterAccelerationStructureClustersBottomLevelInputNV * pClustersBottomLevel_ = {} )
      : pClustersBottomLevel( pClustersBottomLevel_ )
    {
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV( ClusterAccelerationStructureTriangleClusterInputNV * pTriangleClusters_ )
      : pTriangleClusters( pTriangleClusters_ )
    {
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV( ClusterAccelerationStructureMoveObjectsInputNV * pMoveObjects_ )
      : pMoveObjects( pMoveObjects_ )
    {
    }
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
    // Fluent setters: each selects one union alternative and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV &
      setPClustersBottomLevel( ClusterAccelerationStructureClustersBottomLevelInputNV * pClustersBottomLevel_ ) VULKAN_HPP_NOEXCEPT
    {
      pClustersBottomLevel = pClustersBottomLevel_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV &
      setPTriangleClusters( ClusterAccelerationStructureTriangleClusterInputNV * pTriangleClusters_ ) VULKAN_HPP_NOEXCEPT
    {
      pTriangleClusters = pTriangleClusters_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV &
      setPMoveObjects( ClusterAccelerationStructureMoveObjectsInputNV * pMoveObjects_ ) VULKAN_HPP_NOEXCEPT
    {
      pMoveObjects = pMoveObjects_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vk union, so the wrapper can be passed directly to the C API.
    operator VkClusterAccelerationStructureOpInputNV const &() const
    {
      return *reinterpret_cast<const VkClusterAccelerationStructureOpInputNV *>( this );
    }

    operator VkClusterAccelerationStructureOpInputNV &()
    {
      return *reinterpret_cast<VkClusterAccelerationStructureOpInputNV *>( this );
    }

    // With unrestricted unions the members use the C++ wrapper types; otherwise fall back to the native Vk types.
#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
    ClusterAccelerationStructureClustersBottomLevelInputNV * pClustersBottomLevel;
    ClusterAccelerationStructureTriangleClusterInputNV *     pTriangleClusters;
    ClusterAccelerationStructureMoveObjectsInputNV *         pMoveObjects;
#else
    VkClusterAccelerationStructureClustersBottomLevelInputNV * pClustersBottomLevel;
    VkClusterAccelerationStructureTriangleClusterInputNV *     pTriangleClusters;
    VkClusterAccelerationStructureMoveObjectsInputNV *         pMoveObjects;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vk type to its C++ wrapper type.
  template <>
  struct CppType<VkClusterAccelerationStructureOpInputNV>
  {
    using Type = ClusterAccelerationStructureOpInputNV;
  };
#endif

  // wrapper struct for struct VkClusterAccelerationStructureInputInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureInputInfoNV.html
  // NOTE(review): unlike the sibling wrappers here, no operator==/!=/<=> is provided — presumably because
  // the union member opInput has no general notion of equality.
  struct ClusterAccelerationStructureInputInfoNV
  {
    using NativeType = VkClusterAccelerationStructureInputInfoNV;

    // Metadata consumed by vulkan.hpp's structure-chain machinery; structureType matches the sType member below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eClusterAccelerationStructureInputInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: every parameter is defaulted, so any prefix of the members may be supplied.
    VULKAN_HPP_CONSTEXPR_14
      ClusterAccelerationStructureInputInfoNV( uint32_t                              maxAccelerationStructureCount_ = {},
                                               BuildAccelerationStructureFlagsKHR    flags_                         = {},
                                               ClusterAccelerationStructureOpTypeNV  opType_  = ClusterAccelerationStructureOpTypeNV::eMoveObjects,
                                               ClusterAccelerationStructureOpModeNV  opMode_  = ClusterAccelerationStructureOpModeNV::eImplicitDestinations,
                                               ClusterAccelerationStructureOpInputNV opInput_ = {},
                                               void *                                pNext_   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxAccelerationStructureCount{ maxAccelerationStructureCount_ }
      , flags{ flags_ }
      , opType{ opType_ }
      , opMode{ opMode_ }
      , opInput{ opInput_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV( ClusterAccelerationStructureInputInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vk struct: the wrapper mirrors its layout, so reinterpreting the reference is sufficient.
    ClusterAccelerationStructureInputInfoNV( VkClusterAccelerationStructureInputInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ClusterAccelerationStructureInputInfoNV( *reinterpret_cast<ClusterAccelerationStructureInputInfoNV const *>( &rhs ) )
    {
    }

    ClusterAccelerationStructureInputInfoNV & operator=( ClusterAccelerationStructureInputInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native Vk struct; available even when the wrapper constructors are compiled out.
    ClusterAccelerationStructureInputInfoNV & operator=( VkClusterAccelerationStructureInputInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ClusterAccelerationStructureInputInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns a single member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV &
      setMaxAccelerationStructureCount( uint32_t maxAccelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxAccelerationStructureCount = maxAccelerationStructureCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & setFlags( BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & setOpType( ClusterAccelerationStructureOpTypeNV opType_ ) VULKAN_HPP_NOEXCEPT
    {
      opType = opType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & setOpMode( ClusterAccelerationStructureOpModeNV opMode_ ) VULKAN_HPP_NOEXCEPT
    {
      opMode = opMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & setOpInput( ClusterAccelerationStructureOpInputNV const & opInput_ ) VULKAN_HPP_NOEXCEPT
    {
      opInput = opInput_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vk struct (by reference and by pointer),
    // so the wrapper can be passed directly to the C API.
    operator VkClusterAccelerationStructureInputInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkClusterAccelerationStructureInputInfoNV *>( this );
    }

    operator VkClusterAccelerationStructureInputInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkClusterAccelerationStructureInputInfoNV *>( this );
    }

    operator VkClusterAccelerationStructureInputInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkClusterAccelerationStructureInputInfoNV *>( this );
    }

    operator VkClusterAccelerationStructureInputInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkClusterAccelerationStructureInputInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references.
    std::tuple<StructureType const &,
               void * const &,
               uint32_t const &,
               BuildAccelerationStructureFlagsKHR const &,
               ClusterAccelerationStructureOpTypeNV const &,
               ClusterAccelerationStructureOpModeNV const &,
               ClusterAccelerationStructureOpInputNV const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxAccelerationStructureCount, flags, opType, opMode, opInput );
    }
#endif

  public:
    // Members mirror the native struct; sType is fixed, pNext allows Vulkan structure chaining.
    StructureType                         sType                         = StructureType::eClusterAccelerationStructureInputInfoNV;
    void *                                pNext                         = {};
    uint32_t                              maxAccelerationStructureCount = {};
    BuildAccelerationStructureFlagsKHR    flags                         = {};
    ClusterAccelerationStructureOpTypeNV  opType                        = ClusterAccelerationStructureOpTypeNV::eMoveObjects;
    ClusterAccelerationStructureOpModeNV  opMode                        = ClusterAccelerationStructureOpModeNV::eImplicitDestinations;
    ClusterAccelerationStructureOpInputNV opInput                       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vk type to its C++ wrapper type.
  template <>
  struct CppType<VkClusterAccelerationStructureInputInfoNV>
  {
    using Type = ClusterAccelerationStructureInputInfoNV;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eClusterAccelerationStructureInputInfoNV>
  {
    using Type = ClusterAccelerationStructureInputInfoNV;
  };

  // wrapper struct for struct VkStridedDeviceAddressRegionKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkStridedDeviceAddressRegionKHR.html
  // Plain (non-extensible) struct: no sType/pNext members, so no structure-chain metadata.
  struct StridedDeviceAddressRegionKHR
  {
    using NativeType = VkStridedDeviceAddressRegionKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: every parameter is defaulted, so any prefix of the members may be supplied.
    VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR( DeviceAddress deviceAddress_ = {}, DeviceSize stride_ = {}, DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
      : deviceAddress{ deviceAddress_ }
      , stride{ stride_ }
      , size{ size_ }
    {
    }

    VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vk struct: the wrapper mirrors its layout, so reinterpreting the reference is sufficient.
    StridedDeviceAddressRegionKHR( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : StridedDeviceAddressRegionKHR( *reinterpret_cast<StridedDeviceAddressRegionKHR const *>( &rhs ) )
    {
    }

    StridedDeviceAddressRegionKHR & operator=( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native Vk struct; available even when the wrapper constructors are compiled out.
    StridedDeviceAddressRegionKHR & operator=( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<StridedDeviceAddressRegionKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns a single member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setDeviceAddress( DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceAddress = deviceAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setStride( DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
    {
      stride = stride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setSize( DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vk struct (by reference and by pointer),
    // so the wrapper can be passed directly to the C API.
    operator VkStridedDeviceAddressRegionKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( this );
    }

    operator VkStridedDeviceAddressRegionKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkStridedDeviceAddressRegionKHR *>( this );
    }

    operator VkStridedDeviceAddressRegionKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( this );
    }

    operator VkStridedDeviceAddressRegionKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkStridedDeviceAddressRegionKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; also reused below for tuple-based equality.
    std::tuple<DeviceAddress const &, DeviceSize const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( deviceAddress, stride, size );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( StridedDeviceAddressRegionKHR const & ) const = default;
#else
    // Memberwise equality fallback when defaulted <=> is unavailable.
    bool operator==( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( deviceAddress == rhs.deviceAddress ) && ( stride == rhs.stride ) && ( size == rhs.size );
#  endif
    }

    bool operator!=( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native struct.
    DeviceAddress deviceAddress = {};
    DeviceSize    stride        = {};
    DeviceSize    size          = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vk type to its C++ wrapper type.
  template <>
  struct CppType<VkStridedDeviceAddressRegionKHR>
  {
    using Type = StridedDeviceAddressRegionKHR;
  };
#endif

  // wrapper struct for struct VkClusterAccelerationStructureCommandsInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureCommandsInfoNV.html
  struct ClusterAccelerationStructureCommandsInfoNV
  {
    using NativeType = VkClusterAccelerationStructureCommandsInfoNV;

    // sType identification for pNext chains; allowDuplicate = false indicates this struct
    // is not expected to appear more than once in a chain (consumed by the
    // structure-chain utilities defined elsewhere in vulkan.hpp)
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eClusterAccelerationStructureCommandsInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor: every field defaults to zero-initialized / null; pNext comes
    // last so the payload fields can be passed positionally
    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV( ClusterAccelerationStructureInputInfoNV              input_                  = {},
                                                                        DeviceAddress                                        dstImplicitData_        = {},
                                                                        DeviceAddress                                        scratchData_            = {},
                                                                        StridedDeviceAddressRegionKHR                        dstAddressesArray_      = {},
                                                                        StridedDeviceAddressRegionKHR                        dstSizesArray_          = {},
                                                                        StridedDeviceAddressRegionKHR                        srcInfosArray_          = {},
                                                                        DeviceAddress                                        srcInfosCount_          = {},
                                                                        ClusterAccelerationStructureAddressResolutionFlagsNV addressResolutionFlags_ = {},
                                                                        void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , input{ input_ }
      , dstImplicitData{ dstImplicitData_ }
      , scratchData{ scratchData_ }
      , dstAddressesArray{ dstAddressesArray_ }
      , dstSizesArray{ dstSizesArray_ }
      , srcInfosArray{ srcInfosArray_ }
      , srcInfosCount{ srcInfosCount_ }
      , addressResolutionFlags{ addressResolutionFlags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV( ClusterAccelerationStructureCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // reinterprets a native struct (identical layout) as the C++ wrapper
    ClusterAccelerationStructureCommandsInfoNV( VkClusterAccelerationStructureCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ClusterAccelerationStructureCommandsInfoNV( *reinterpret_cast<ClusterAccelerationStructureCommandsInfoNV const *>( &rhs ) )
    {
    }

    ClusterAccelerationStructureCommandsInfoNV & operator=( ClusterAccelerationStructureCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assignment from the native struct, again relying on identical layout
    ClusterAccelerationStructureCommandsInfoNV & operator=( VkClusterAccelerationStructureCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ClusterAccelerationStructureCommandsInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setInput( ClusterAccelerationStructureInputInfoNV const & input_ ) VULKAN_HPP_NOEXCEPT
    {
      input = input_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setDstImplicitData( DeviceAddress dstImplicitData_ ) VULKAN_HPP_NOEXCEPT
    {
      dstImplicitData = dstImplicitData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setScratchData( DeviceAddress scratchData_ ) VULKAN_HPP_NOEXCEPT
    {
      scratchData = scratchData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV &
      setDstAddressesArray( StridedDeviceAddressRegionKHR const & dstAddressesArray_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAddressesArray = dstAddressesArray_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV &
      setDstSizesArray( StridedDeviceAddressRegionKHR const & dstSizesArray_ ) VULKAN_HPP_NOEXCEPT
    {
      dstSizesArray = dstSizesArray_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV &
      setSrcInfosArray( StridedDeviceAddressRegionKHR const & srcInfosArray_ ) VULKAN_HPP_NOEXCEPT
    {
      srcInfosArray = srcInfosArray_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setSrcInfosCount( DeviceAddress srcInfosCount_ ) VULKAN_HPP_NOEXCEPT
    {
      srcInfosCount = srcInfosCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV &
      setAddressResolutionFlags( ClusterAccelerationStructureAddressResolutionFlagsNV addressResolutionFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      addressResolutionFlags = addressResolutionFlags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native type; valid because the wrapper adds no members
    // and mirrors the C struct's layout exactly
    operator VkClusterAccelerationStructureCommandsInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkClusterAccelerationStructureCommandsInfoNV *>( this );
    }

    operator VkClusterAccelerationStructureCommandsInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkClusterAccelerationStructureCommandsInfoNV *>( this );
    }

    operator VkClusterAccelerationStructureCommandsInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkClusterAccelerationStructureCommandsInfoNV *>( this );
    }

    operator VkClusterAccelerationStructureCommandsInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkClusterAccelerationStructureCommandsInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // all members as a tuple of const references (enabled with VULKAN_HPP_USE_REFLECT)
    std::tuple<StructureType const &,
               void * const &,
               ClusterAccelerationStructureInputInfoNV const &,
               DeviceAddress const &,
               DeviceAddress const &,
               StridedDeviceAddressRegionKHR const &,
               StridedDeviceAddressRegionKHR const &,
               StridedDeviceAddressRegionKHR const &,
               DeviceAddress const &,
               ClusterAccelerationStructureAddressResolutionFlagsNV const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(
        sType, pNext, input, dstImplicitData, scratchData, dstAddressesArray, dstSizesArray, srcInfosArray, srcInfosCount, addressResolutionFlags );
    }
#endif

  public:
    // member order and types mirror VkClusterAccelerationStructureCommandsInfoNV exactly;
    // the reinterpret_cast conversions above depend on this
    StructureType                                        sType                  = StructureType::eClusterAccelerationStructureCommandsInfoNV;
    void *                                               pNext                  = {};
    ClusterAccelerationStructureInputInfoNV              input                  = {};
    DeviceAddress                                        dstImplicitData        = {};
    DeviceAddress                                        scratchData            = {};
    StridedDeviceAddressRegionKHR                        dstAddressesArray      = {};
    StridedDeviceAddressRegionKHR                        dstSizesArray          = {};
    StridedDeviceAddressRegionKHR                        srcInfosArray          = {};
    DeviceAddress                                        srcInfosCount          = {};
    ClusterAccelerationStructureAddressResolutionFlagsNV addressResolutionFlags = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type back to its C++ wrapper (C++20 only)
  template <>
  struct CppType<VkClusterAccelerationStructureCommandsInfoNV>
  {
    using Type = ClusterAccelerationStructureCommandsInfoNV;
  };
#endif

  // maps the StructureType enum value to its wrapper type, e.g. for pNext-chain helpers
  template <>
  struct CppType<StructureType, StructureType::eClusterAccelerationStructureCommandsInfoNV>
  {
    using Type = ClusterAccelerationStructureCommandsInfoNV;
  };

  // wrapper struct for struct VkClusterAccelerationStructureGetTemplateIndicesInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureGetTemplateIndicesInfoNV.html
  struct ClusterAccelerationStructureGetTemplateIndicesInfoNV
  {
    using NativeType = VkClusterAccelerationStructureGetTemplateIndicesInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Constructs the wrapper; the template address defaults to zero.
    VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureGetTemplateIndicesInfoNV( DeviceAddress clusterTemplateAddress_ = {} ) VULKAN_HPP_NOEXCEPT
      : clusterTemplateAddress( clusterTemplateAddress_ )
    {
    }

    VULKAN_HPP_CONSTEXPR
      ClusterAccelerationStructureGetTemplateIndicesInfoNV( ClusterAccelerationStructureGetTemplateIndicesInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Views a native struct as the wrapper; the two types share one layout.
    ClusterAccelerationStructureGetTemplateIndicesInfoNV( VkClusterAccelerationStructureGetTemplateIndicesInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ClusterAccelerationStructureGetTemplateIndicesInfoNV( *reinterpret_cast<ClusterAccelerationStructureGetTemplateIndicesInfoNV const *>( &rhs ) )
    {
    }

    ClusterAccelerationStructureGetTemplateIndicesInfoNV &
      operator=( ClusterAccelerationStructureGetTemplateIndicesInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assigns from the native struct by viewing it through the wrapper type.
    ClusterAccelerationStructureGetTemplateIndicesInfoNV & operator=( VkClusterAccelerationStructureGetTemplateIndicesInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      auto const & wrapped = *reinterpret_cast<ClusterAccelerationStructureGetTemplateIndicesInfoNV const *>( &rhs );
      *this = wrapped;
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setter for the cluster template address.
    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureGetTemplateIndicesInfoNV &
      setClusterTemplateAddress( DeviceAddress clusterTemplateAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      clusterTemplateAddress = clusterTemplateAddress_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reference / pointer conversions to the native type (layout-compatible casts).
    operator VkClusterAccelerationStructureGetTemplateIndicesInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkClusterAccelerationStructureGetTemplateIndicesInfoNV *>( this );
    }

    operator VkClusterAccelerationStructureGetTemplateIndicesInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkClusterAccelerationStructureGetTemplateIndicesInfoNV *>( this );
    }

    operator VkClusterAccelerationStructureGetTemplateIndicesInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkClusterAccelerationStructureGetTemplateIndicesInfoNV *>( this );
    }

    operator VkClusterAccelerationStructureGetTemplateIndicesInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkClusterAccelerationStructureGetTemplateIndicesInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of const references.
    std::tuple<DeviceAddress const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( clusterTemplateAddress );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ClusterAccelerationStructureGetTemplateIndicesInfoNV const & ) const = default;
#else
    bool operator==( ClusterAccelerationStructureGetTemplateIndicesInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return clusterTemplateAddress == rhs.clusterTemplateAddress;
#  endif
    }

    bool operator!=( ClusterAccelerationStructureGetTemplateIndicesInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    DeviceAddress clusterTemplateAddress = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type back to its C++ wrapper (C++20 only)
  template <>
  struct CppType<VkClusterAccelerationStructureGetTemplateIndicesInfoNV>
  {
    using Type = ClusterAccelerationStructureGetTemplateIndicesInfoNV;
  };
#endif

  // wrapper struct for struct VkClusterAccelerationStructureInstantiateClusterInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureInstantiateClusterInfoNV.html
  struct ClusterAccelerationStructureInstantiateClusterInfoNV
  {
    using NativeType = VkClusterAccelerationStructureInstantiateClusterInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; geometryIndexOffset_ and reserved_ initialize the 24- and 8-bit
    // bit-fields declared at the bottom of the struct
    VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureInstantiateClusterInfoNV( uint32_t               clusterIdOffset_        = {},
                                                                               uint32_t               geometryIndexOffset_    = {},
                                                                               uint32_t               reserved_               = {},
                                                                               DeviceAddress          clusterTemplateAddress_ = {},
                                                                               StridedDeviceAddressNV vertexBuffer_           = {} ) VULKAN_HPP_NOEXCEPT
      : clusterIdOffset{ clusterIdOffset_ }
      , geometryIndexOffset{ geometryIndexOffset_ }
      , reserved{ reserved_ }
      , clusterTemplateAddress{ clusterTemplateAddress_ }
      , vertexBuffer{ vertexBuffer_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      ClusterAccelerationStructureInstantiateClusterInfoNV( ClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // reinterprets a native struct (identical layout) as the C++ wrapper
    ClusterAccelerationStructureInstantiateClusterInfoNV( VkClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ClusterAccelerationStructureInstantiateClusterInfoNV( *reinterpret_cast<ClusterAccelerationStructureInstantiateClusterInfoNV const *>( &rhs ) )
    {
    }

    ClusterAccelerationStructureInstantiateClusterInfoNV &
      operator=( ClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assignment from the native struct, again relying on identical layout
    ClusterAccelerationStructureInstantiateClusterInfoNV & operator=( VkClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ClusterAccelerationStructureInstantiateClusterInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV & setClusterIdOffset( uint32_t clusterIdOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      clusterIdOffset = clusterIdOffset_;
      return *this;
    }

    // note: value is truncated to the 24-bit geometryIndexOffset bit-field
    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV & setGeometryIndexOffset( uint32_t geometryIndexOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryIndexOffset = geometryIndexOffset_;
      return *this;
    }

    // note: value is truncated to the 8-bit reserved bit-field
    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV & setReserved( uint32_t reserved_ ) VULKAN_HPP_NOEXCEPT
    {
      reserved = reserved_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV &
      setClusterTemplateAddress( DeviceAddress clusterTemplateAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      clusterTemplateAddress = clusterTemplateAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV &
      setVertexBuffer( StridedDeviceAddressNV const & vertexBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexBuffer = vertexBuffer_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native type; valid because the wrapper mirrors the
    // C struct's layout exactly
    operator VkClusterAccelerationStructureInstantiateClusterInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkClusterAccelerationStructureInstantiateClusterInfoNV *>( this );
    }

    operator VkClusterAccelerationStructureInstantiateClusterInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkClusterAccelerationStructureInstantiateClusterInfoNV *>( this );
    }

    operator VkClusterAccelerationStructureInstantiateClusterInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkClusterAccelerationStructureInstantiateClusterInfoNV *>( this );
    }

    operator VkClusterAccelerationStructureInstantiateClusterInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkClusterAccelerationStructureInstantiateClusterInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // geometryIndexOffset and reserved are bit-fields: the language forbids binding a
    // non-const reference to a bit-field (so std::tie over them is ill-formed), and a
    // const reference would bind to a dangling temporary copy. Those two members are
    // therefore returned by value; the rest stay const references.
    std::tuple<uint32_t const &, uint32_t, uint32_t, DeviceAddress const &, StridedDeviceAddressNV const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tuple<uint32_t const &, uint32_t, uint32_t, DeviceAddress const &, StridedDeviceAddressNV const &>(
        clusterIdOffset, geometryIndexOffset, reserved, clusterTemplateAddress, vertexBuffer );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ClusterAccelerationStructureInstantiateClusterInfoNV const & ) const = default;
#else
    bool operator==( ClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( clusterIdOffset == rhs.clusterIdOffset ) && ( geometryIndexOffset == rhs.geometryIndexOffset ) && ( reserved == rhs.reserved ) &&
             ( clusterTemplateAddress == rhs.clusterTemplateAddress ) && ( vertexBuffer == rhs.vertexBuffer );
#  endif
    }

    bool operator!=( ClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t               clusterIdOffset = {};
    // 24/8 bit-fields packed into a single uint32_t, mirroring the C struct; bit-fields
    // cannot carry default member initializers pre-C++20, so the value constructor above
    // initializes them instead
    uint32_t               geometryIndexOffset : 24;
    uint32_t               reserved            : 8;
    DeviceAddress          clusterTemplateAddress = {};
    StridedDeviceAddressNV vertexBuffer           = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type back to its C++ wrapper (C++20 only)
  template <>
  struct CppType<VkClusterAccelerationStructureInstantiateClusterInfoNV>
  {
    using Type = ClusterAccelerationStructureInstantiateClusterInfoNV;
  };
#endif

  // wrapper struct for struct VkClusterAccelerationStructureMoveObjectsInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureMoveObjectsInfoNV.html
  struct ClusterAccelerationStructureMoveObjectsInfoNV
  {
    using NativeType = VkClusterAccelerationStructureMoveObjectsInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Constructs the wrapper; the source acceleration structure address defaults to zero.
    VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureMoveObjectsInfoNV( DeviceAddress srcAccelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT
      : srcAccelerationStructure( srcAccelerationStructure_ )
    {
    }

    VULKAN_HPP_CONSTEXPR
      ClusterAccelerationStructureMoveObjectsInfoNV( ClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Views a native struct as the wrapper; the two types share one layout.
    ClusterAccelerationStructureMoveObjectsInfoNV( VkClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ClusterAccelerationStructureMoveObjectsInfoNV( *reinterpret_cast<ClusterAccelerationStructureMoveObjectsInfoNV const *>( &rhs ) )
    {
    }

    ClusterAccelerationStructureMoveObjectsInfoNV & operator=( ClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assigns from the native struct by viewing it through the wrapper type.
    ClusterAccelerationStructureMoveObjectsInfoNV & operator=( VkClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      auto const & wrapped = *reinterpret_cast<ClusterAccelerationStructureMoveObjectsInfoNV const *>( &rhs );
      *this = wrapped;
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setter for the source acceleration structure address.
    VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInfoNV &
      setSrcAccelerationStructure( DeviceAddress srcAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAccelerationStructure = srcAccelerationStructure_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reference / pointer conversions to the native type (layout-compatible casts).
    operator VkClusterAccelerationStructureMoveObjectsInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkClusterAccelerationStructureMoveObjectsInfoNV *>( this );
    }

    operator VkClusterAccelerationStructureMoveObjectsInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkClusterAccelerationStructureMoveObjectsInfoNV *>( this );
    }

    operator VkClusterAccelerationStructureMoveObjectsInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkClusterAccelerationStructureMoveObjectsInfoNV *>( this );
    }

    operator VkClusterAccelerationStructureMoveObjectsInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkClusterAccelerationStructureMoveObjectsInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of const references.
    std::tuple<DeviceAddress const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( srcAccelerationStructure );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ClusterAccelerationStructureMoveObjectsInfoNV const & ) const = default;
#else
    bool operator==( ClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return srcAccelerationStructure == rhs.srcAccelerationStructure;
#  endif
    }

    bool operator!=( ClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    DeviceAddress srcAccelerationStructure = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type back to its C++ wrapper (C++20 only)
  template <>
  struct CppType<VkClusterAccelerationStructureMoveObjectsInfoNV>
  {
    using Type = ClusterAccelerationStructureMoveObjectsInfoNV;
  };
#endif

  // wrapper struct for struct VkCoarseSampleLocationNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCoarseSampleLocationNV.html
  struct CoarseSampleLocationNV
  {
    using NativeType = VkCoarseSampleLocationNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Constructs the wrapper; pixel coordinates and sample index default to zero.
    VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( uint32_t pixelX_ = {}, uint32_t pixelY_ = {}, uint32_t sample_ = {} ) VULKAN_HPP_NOEXCEPT
      : pixelX( pixelX_ )
      , pixelY( pixelY_ )
      , sample( sample_ )
    {
    }

    VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Views a native struct as the wrapper; the two types share one layout.
    CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : CoarseSampleLocationNV( *reinterpret_cast<CoarseSampleLocationNV const *>( &rhs ) )
    {
    }

    CoarseSampleLocationNV & operator=( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assigns from the native struct by viewing it through the wrapper type.
    CoarseSampleLocationNV & operator=( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      auto const & wrapped = *reinterpret_cast<CoarseSampleLocationNV const *>( &rhs );
      *this = wrapped;
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setPixelX( uint32_t pixelX_ ) VULKAN_HPP_NOEXCEPT
    {
      pixelX = pixelX_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setPixelY( uint32_t pixelY_ ) VULKAN_HPP_NOEXCEPT
    {
      pixelY = pixelY_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setSample( uint32_t sample_ ) VULKAN_HPP_NOEXCEPT
    {
      sample = sample_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reference / pointer conversions to the native type (layout-compatible casts).
    operator VkCoarseSampleLocationNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCoarseSampleLocationNV *>( this );
    }

    operator VkCoarseSampleLocationNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCoarseSampleLocationNV *>( this );
    }

    operator VkCoarseSampleLocationNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCoarseSampleLocationNV *>( this );
    }

    operator VkCoarseSampleLocationNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCoarseSampleLocationNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of const references.
    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( pixelX, pixelY, sample );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CoarseSampleLocationNV const & ) const = default;
#else
    bool operator==( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( pixelX == rhs.pixelX ) && ( pixelY == rhs.pixelY ) && ( sample == rhs.sample );
#  endif
    }

    bool operator!=( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    uint32_t pixelX = {};
    uint32_t pixelY = {};
    uint32_t sample = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type back to its C++ wrapper (C++20 only)
  template <>
  struct CppType<VkCoarseSampleLocationNV>
  {
    using Type = CoarseSampleLocationNV;
  };
#endif

  // wrapper struct for struct VkCoarseSampleOrderCustomNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCoarseSampleOrderCustomNV.html
  struct CoarseSampleOrderCustomNV
  {
    using NativeType = VkCoarseSampleOrderCustomNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; pSampleLocations is a non-owning pointer — the caller must keep
    // the pointed-to array alive for as long as this struct is in use
    VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( ShadingRatePaletteEntryNV      shadingRate_         = ShadingRatePaletteEntryNV::eNoInvocations,
                                                    uint32_t                       sampleCount_         = {},
                                                    uint32_t                       sampleLocationCount_ = {},
                                                    const CoarseSampleLocationNV * pSampleLocations_    = {} ) VULKAN_HPP_NOEXCEPT
      : shadingRate{ shadingRate_ }
      , sampleCount{ sampleCount_ }
      , sampleLocationCount{ sampleLocationCount_ }
      , pSampleLocations{ pSampleLocations_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // reinterprets a native struct (identical layout) as the C++ wrapper
    CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : CoarseSampleOrderCustomNV( *reinterpret_cast<CoarseSampleOrderCustomNV const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // enhanced-mode constructor: derives sampleLocationCount and pSampleLocations from
    // the given array proxy (pointer remains non-owning)
    CoarseSampleOrderCustomNV( ShadingRatePaletteEntryNV                                     shadingRate_,
                               uint32_t                                                      sampleCount_,
                               ArrayProxyNoTemporaries<const CoarseSampleLocationNV> const & sampleLocations_ )
      : shadingRate( shadingRate_ )
      , sampleCount( sampleCount_ )
      , sampleLocationCount( static_cast<uint32_t>( sampleLocations_.size() ) )
      , pSampleLocations( sampleLocations_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    CoarseSampleOrderCustomNV & operator=( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assignment from the native struct, again relying on identical layout
    CoarseSampleOrderCustomNV & operator=( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CoarseSampleOrderCustomNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setShadingRate( ShadingRatePaletteEntryNV shadingRate_ ) VULKAN_HPP_NOEXCEPT
    {
      shadingRate = shadingRate_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setSampleCount( uint32_t sampleCount_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleCount = sampleCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setSampleLocationCount( uint32_t sampleLocationCount_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationCount = sampleLocationCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setPSampleLocations( const CoarseSampleLocationNV * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
    {
      pSampleLocations = pSampleLocations_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // sets count and pointer together from an array proxy (non-owning; see note above)
    CoarseSampleOrderCustomNV & setSampleLocations( ArrayProxyNoTemporaries<const CoarseSampleLocationNV> const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationCount = static_cast<uint32_t>( sampleLocations_.size() );
      pSampleLocations    = sampleLocations_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native type; valid because the wrapper mirrors the
    // C struct's layout exactly
    operator VkCoarseSampleOrderCustomNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( this );
    }

    operator VkCoarseSampleOrderCustomNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCoarseSampleOrderCustomNV *>( this );
    }

    operator VkCoarseSampleOrderCustomNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( this );
    }

    operator VkCoarseSampleOrderCustomNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCoarseSampleOrderCustomNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // all members as a tuple of const references (enabled with VULKAN_HPP_USE_REFLECT)
    std::tuple<ShadingRatePaletteEntryNV const &, uint32_t const &, uint32_t const &, const CoarseSampleLocationNV * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( shadingRate, sampleCount, sampleLocationCount, pSampleLocations );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CoarseSampleOrderCustomNV const & ) const = default;
#else
    // note: equality compares the pSampleLocations pointer, not the pointed-to elements
    bool operator==( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( shadingRate == rhs.shadingRate ) && ( sampleCount == rhs.sampleCount ) && ( sampleLocationCount == rhs.sampleLocationCount ) &&
             ( pSampleLocations == rhs.pSampleLocations );
#  endif
    }

    bool operator!=( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    ShadingRatePaletteEntryNV      shadingRate         = ShadingRatePaletteEntryNV::eNoInvocations;
    uint32_t                       sampleCount         = {};
    uint32_t                       sampleLocationCount = {};
    const CoarseSampleLocationNV * pSampleLocations    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type back to its C++ wrapper (C++20 only)
  template <>
  struct CppType<VkCoarseSampleOrderCustomNV>
  {
    using Type = CoarseSampleOrderCustomNV;
  };
#endif

  // wrapper struct for struct VkColorBlendAdvancedEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkColorBlendAdvancedEXT.html
  struct ColorBlendAdvancedEXT
  {
    using NativeType = VkColorBlendAdvancedEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor: blend op defaults to eAdd, overlap mode to eUncorrelated, and
    // the Bool32 flags to zero
    VULKAN_HPP_CONSTEXPR ColorBlendAdvancedEXT( BlendOp         advancedBlendOp_  = BlendOp::eAdd,
                                                Bool32          srcPremultiplied_ = {},
                                                Bool32          dstPremultiplied_ = {},
                                                BlendOverlapEXT blendOverlap_     = BlendOverlapEXT::eUncorrelated,
                                                Bool32          clampResults_     = {} ) VULKAN_HPP_NOEXCEPT
      : advancedBlendOp{ advancedBlendOp_ }
      , srcPremultiplied{ srcPremultiplied_ }
      , dstPremultiplied{ dstPremultiplied_ }
      , blendOverlap{ blendOverlap_ }
      , clampResults{ clampResults_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ColorBlendAdvancedEXT( ColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ColorBlendAdvancedEXT( VkColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ColorBlendAdvancedEXT( *reinterpret_cast<ColorBlendAdvancedEXT const *>( &rhs ) )
    {
    }

    ColorBlendAdvancedEXT & operator=( ColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    ColorBlendAdvancedEXT & operator=( VkColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ColorBlendAdvancedEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setAdvancedBlendOp( BlendOp advancedBlendOp_ ) VULKAN_HPP_NOEXCEPT
    {
      advancedBlendOp = advancedBlendOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setSrcPremultiplied( Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT
    {
      srcPremultiplied = srcPremultiplied_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setDstPremultiplied( Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT
    {
      dstPremultiplied = dstPremultiplied_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setBlendOverlap( BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT
    {
      blendOverlap = blendOverlap_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setClampResults( Bool32 clampResults_ ) VULKAN_HPP_NOEXCEPT
    {
      clampResults = clampResults_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkColorBlendAdvancedEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkColorBlendAdvancedEXT *>( this );
    }

    operator VkColorBlendAdvancedEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkColorBlendAdvancedEXT *>( this );
    }

    operator VkColorBlendAdvancedEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkColorBlendAdvancedEXT *>( this );
    }

    operator VkColorBlendAdvancedEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkColorBlendAdvancedEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<BlendOp const &, Bool32 const &, Bool32 const &, BlendOverlapEXT const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( advancedBlendOp, srcPremultiplied, dstPremultiplied, blendOverlap, clampResults );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ColorBlendAdvancedEXT const & ) const = default;
#else
    bool operator==( ColorBlendAdvancedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( advancedBlendOp == rhs.advancedBlendOp ) && ( srcPremultiplied == rhs.srcPremultiplied ) && ( dstPremultiplied == rhs.dstPremultiplied ) &&
             ( blendOverlap == rhs.blendOverlap ) && ( clampResults == rhs.clampResults );
#  endif
    }

    bool operator!=( ColorBlendAdvancedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    BlendOp         advancedBlendOp  = BlendOp::eAdd;
    Bool32          srcPremultiplied = {};
    Bool32          dstPremultiplied = {};
    BlendOverlapEXT blendOverlap     = BlendOverlapEXT::eUncorrelated;
    Bool32          clampResults     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkColorBlendAdvancedEXT>
  {
    using Type = ColorBlendAdvancedEXT;
  };
#endif

  // wrapper struct for struct VkColorBlendEquationEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkColorBlendEquationEXT.html
  // NOTE: the member order and types mirror the C struct exactly; the reinterpret_casts below rely on this layout compatibility.
  struct ColorBlendEquationEXT
  {
    using NativeType = VkColorBlendEquationEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Constructor with one defaulted argument per member; defaults are the zero/identity blend values.
    VULKAN_HPP_CONSTEXPR ColorBlendEquationEXT( BlendFactor srcColorBlendFactor_ = BlendFactor::eZero,
                                                BlendFactor dstColorBlendFactor_ = BlendFactor::eZero,
                                                BlendOp     colorBlendOp_        = BlendOp::eAdd,
                                                BlendFactor srcAlphaBlendFactor_ = BlendFactor::eZero,
                                                BlendFactor dstAlphaBlendFactor_ = BlendFactor::eZero,
                                                BlendOp     alphaBlendOp_        = BlendOp::eAdd ) VULKAN_HPP_NOEXCEPT
      : srcColorBlendFactor{ srcColorBlendFactor_ }
      , dstColorBlendFactor{ dstColorBlendFactor_ }
      , colorBlendOp{ colorBlendOp_ }
      , srcAlphaBlendFactor{ srcAlphaBlendFactor_ }
      , dstAlphaBlendFactor{ dstAlphaBlendFactor_ }
      , alphaBlendOp{ alphaBlendOp_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ColorBlendEquationEXT( ColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because the wrapper is layout-compatible with VkColorBlendEquationEXT.
    ColorBlendEquationEXT( VkColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ColorBlendEquationEXT( *reinterpret_cast<ColorBlendEquationEXT const *>( &rhs ) )
    {
    }

    ColorBlendEquationEXT & operator=( ColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct, again via the layout-compatible reinterpret_cast.
    ColorBlendEquationEXT & operator=( VkColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ColorBlendEquationEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this so calls can be composed fluently.
    VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setSrcColorBlendFactor( BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      srcColorBlendFactor = srcColorBlendFactor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setDstColorBlendFactor( BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      dstColorBlendFactor = dstColorBlendFactor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setColorBlendOp( BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT
    {
      colorBlendOp = colorBlendOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setSrcAlphaBlendFactor( BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAlphaBlendFactor = srcAlphaBlendFactor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setDstAlphaBlendFactor( BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAlphaBlendFactor = dstAlphaBlendFactor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setAlphaBlendOp( BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT
    {
      alphaBlendOp = alphaBlendOp_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (by reference and by pointer), so the wrapper can be passed
    // directly to the C API without copying.
    operator VkColorBlendEquationEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkColorBlendEquationEXT *>( this );
    }

    operator VkColorBlendEquationEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkColorBlendEquationEXT *>( this );
    }

    operator VkColorBlendEquationEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkColorBlendEquationEXT *>( this );
    }

    operator VkColorBlendEquationEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkColorBlendEquationEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references; used for generic comparison below.
    std::tuple<BlendFactor const &, BlendFactor const &, BlendOp const &, BlendFactor const &, BlendFactor const &, BlendOp const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( srcColorBlendFactor, dstColorBlendFactor, colorBlendOp, srcAlphaBlendFactor, dstAlphaBlendFactor, alphaBlendOp );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ColorBlendEquationEXT const & ) const = default;
#else
    // Pre-C++20 fallback: memberwise equality (via reflect() when available).
    bool operator==( ColorBlendEquationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( srcColorBlendFactor == rhs.srcColorBlendFactor ) && ( dstColorBlendFactor == rhs.dstColorBlendFactor ) && ( colorBlendOp == rhs.colorBlendOp ) &&
             ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor ) && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor ) && ( alphaBlendOp == rhs.alphaBlendOp );
#  endif
    }

    bool operator!=( ColorBlendEquationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkColorBlendEquationEXT in declaration order; do not reorder.
    BlendFactor srcColorBlendFactor = BlendFactor::eZero;
    BlendFactor dstColorBlendFactor = BlendFactor::eZero;
    BlendOp     colorBlendOp        = BlendOp::eAdd;
    BlendFactor srcAlphaBlendFactor = BlendFactor::eZero;
    BlendFactor dstAlphaBlendFactor = BlendFactor::eZero;
    BlendOp     alphaBlendOp        = BlendOp::eAdd;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and later: map the C type to this C++ wrapper for generic code.
  template <>
  struct CppType<VkColorBlendEquationEXT>
  {
    using Type = ColorBlendEquationEXT;
  };
#endif

  // wrapper struct for struct VkCommandBufferAllocateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferAllocateInfo.html
  // NOTE: the member order and types mirror the C struct exactly; the reinterpret_casts below rely on this layout compatibility.
  struct CommandBufferAllocateInfo
  {
    using NativeType = VkCommandBufferAllocateInfo;

    // allowDuplicate: this struct may not appear more than once in a pNext chain.
    // structureType: the VkStructureType value the sType member is fixed to.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCommandBufferAllocateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Constructor with one defaulted argument per member; sType is set implicitly via the member initializer.
    VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( CommandPool        commandPool_        = {},
                                                    CommandBufferLevel level_              = CommandBufferLevel::ePrimary,
                                                    uint32_t           commandBufferCount_ = {},
                                                    const void *       pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , commandPool{ commandPool_ }
      , level{ level_ }
      , commandBufferCount{ commandBufferCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because the wrapper is layout-compatible with VkCommandBufferAllocateInfo.
    CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : CommandBufferAllocateInfo( *reinterpret_cast<CommandBufferAllocateInfo const *>( &rhs ) )
    {
    }

    CommandBufferAllocateInfo & operator=( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct, again via the layout-compatible reinterpret_cast.
    CommandBufferAllocateInfo & operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CommandBufferAllocateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this so calls can be composed fluently. sType has no setter.
    VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setCommandPool( CommandPool commandPool_ ) VULKAN_HPP_NOEXCEPT
    {
      commandPool = commandPool_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setLevel( CommandBufferLevel level_ ) VULKAN_HPP_NOEXCEPT
    {
      level = level_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
    {
      commandBufferCount = commandBufferCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (by reference and by pointer), so the wrapper can be passed
    // directly to the C API without copying.
    operator VkCommandBufferAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCommandBufferAllocateInfo *>( this );
    }

    operator VkCommandBufferAllocateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCommandBufferAllocateInfo *>( this );
    }

    operator VkCommandBufferAllocateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCommandBufferAllocateInfo *>( this );
    }

    operator VkCommandBufferAllocateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCommandBufferAllocateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members (including sType/pNext) as a tuple of references; used for generic comparison below.
    std::tuple<StructureType const &, const void * const &, CommandPool const &, CommandBufferLevel const &, uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, commandPool, level, commandBufferCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CommandBufferAllocateInfo const & ) const = default;
#else
    // Pre-C++20 fallback: memberwise equality. Note pNext is compared by pointer value, not by chain contents.
    bool operator==( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandPool == rhs.commandPool ) && ( level == rhs.level ) &&
             ( commandBufferCount == rhs.commandBufferCount );
#  endif
    }

    bool operator!=( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkCommandBufferAllocateInfo in declaration order; do not reorder.
    StructureType      sType              = StructureType::eCommandBufferAllocateInfo;
    const void *       pNext              = {};
    CommandPool        commandPool        = {};
    CommandBufferLevel level              = CommandBufferLevel::ePrimary;
    uint32_t           commandBufferCount = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and later: map the C type to this C++ wrapper for generic code.
  template <>
  struct CppType<VkCommandBufferAllocateInfo>
  {
    using Type = CommandBufferAllocateInfo;
  };
#endif

  // Map the StructureType enumerant back to this wrapper (used by pNext-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::eCommandBufferAllocateInfo>
  {
    using Type = CommandBufferAllocateInfo;
  };

  // wrapper struct for struct VkCommandBufferInheritanceInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferInheritanceInfo.html
  // NOTE: the member order and types mirror the C struct exactly; the reinterpret_casts below rely on this layout compatibility.
  struct CommandBufferInheritanceInfo
  {
    using NativeType = VkCommandBufferInheritanceInfo;

    // allowDuplicate: this struct may not appear more than once in a pNext chain.
    // structureType: the VkStructureType value the sType member is fixed to.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCommandBufferInheritanceInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Constructor with one defaulted argument per member; sType is set implicitly via the member initializer.
    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( RenderPass                  renderPass_           = {},
                                                       uint32_t                    subpass_              = {},
                                                       Framebuffer                 framebuffer_          = {},
                                                       Bool32                      occlusionQueryEnable_ = {},
                                                       QueryControlFlags           queryFlags_           = {},
                                                       QueryPipelineStatisticFlags pipelineStatistics_   = {},
                                                       const void *                pNext_                = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , renderPass{ renderPass_ }
      , subpass{ subpass_ }
      , framebuffer{ framebuffer_ }
      , occlusionQueryEnable{ occlusionQueryEnable_ }
      , queryFlags{ queryFlags_ }
      , pipelineStatistics{ pipelineStatistics_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because the wrapper is layout-compatible with VkCommandBufferInheritanceInfo.
    CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : CommandBufferInheritanceInfo( *reinterpret_cast<CommandBufferInheritanceInfo const *>( &rhs ) )
    {
    }

    CommandBufferInheritanceInfo & operator=( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct, again via the layout-compatible reinterpret_cast.
    CommandBufferInheritanceInfo & operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CommandBufferInheritanceInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this so calls can be composed fluently. sType has no setter.
    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setRenderPass( RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
    {
      renderPass = renderPass_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
    {
      subpass = subpass_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setFramebuffer( Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      framebuffer = framebuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setOcclusionQueryEnable( Bool32 occlusionQueryEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      occlusionQueryEnable = occlusionQueryEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setQueryFlags( QueryControlFlags queryFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      queryFlags = queryFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setPipelineStatistics( QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineStatistics = pipelineStatistics_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (by reference and by pointer), so the wrapper can be passed
    // directly to the C API without copying.
    operator VkCommandBufferInheritanceInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCommandBufferInheritanceInfo *>( this );
    }

    operator VkCommandBufferInheritanceInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCommandBufferInheritanceInfo *>( this );
    }

    operator VkCommandBufferInheritanceInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCommandBufferInheritanceInfo *>( this );
    }

    operator VkCommandBufferInheritanceInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCommandBufferInheritanceInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members (including sType/pNext) as a tuple of references; used for generic comparison below.
    std::tuple<StructureType const &,
               const void * const &,
               RenderPass const &,
               uint32_t const &,
               Framebuffer const &,
               Bool32 const &,
               QueryControlFlags const &,
               QueryPipelineStatisticFlags const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, renderPass, subpass, framebuffer, occlusionQueryEnable, queryFlags, pipelineStatistics );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CommandBufferInheritanceInfo const & ) const = default;
#else
    // Pre-C++20 fallback: memberwise equality. Note pNext is compared by pointer value, not by chain contents.
    bool operator==( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && ( subpass == rhs.subpass ) &&
             ( framebuffer == rhs.framebuffer ) && ( occlusionQueryEnable == rhs.occlusionQueryEnable ) && ( queryFlags == rhs.queryFlags ) &&
             ( pipelineStatistics == rhs.pipelineStatistics );
#  endif
    }

    bool operator!=( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkCommandBufferInheritanceInfo in declaration order; do not reorder.
    StructureType               sType                = StructureType::eCommandBufferInheritanceInfo;
    const void *                pNext                = {};
    RenderPass                  renderPass           = {};
    uint32_t                    subpass              = {};
    Framebuffer                 framebuffer          = {};
    Bool32                      occlusionQueryEnable = {};
    QueryControlFlags           queryFlags           = {};
    QueryPipelineStatisticFlags pipelineStatistics   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and later: map the C type to this C++ wrapper for generic code.
  template <>
  struct CppType<VkCommandBufferInheritanceInfo>
  {
    using Type = CommandBufferInheritanceInfo;
  };
#endif

  // Map the StructureType enumerant back to this wrapper (used by pNext-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::eCommandBufferInheritanceInfo>
  {
    using Type = CommandBufferInheritanceInfo;
  };

  // wrapper struct for struct VkCommandBufferBeginInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferBeginInfo.html
  // NOTE: the member order and types mirror the C struct exactly; the reinterpret_casts below rely on this layout compatibility.
  struct CommandBufferBeginInfo
  {
    using NativeType = VkCommandBufferBeginInfo;

    // allowDuplicate: this struct may not appear more than once in a pNext chain.
    // structureType: the VkStructureType value the sType member is fixed to.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCommandBufferBeginInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Constructor with one defaulted argument per member; sType is set implicitly via the member initializer.
    // pInheritanceInfo is a non-owning pointer — the caller must keep the pointee alive while this struct is in use.
    VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( CommandBufferUsageFlags              flags_            = {},
                                                 const CommandBufferInheritanceInfo * pInheritanceInfo_ = {},
                                                 const void *                         pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , pInheritanceInfo{ pInheritanceInfo_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because the wrapper is layout-compatible with VkCommandBufferBeginInfo.
    CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : CommandBufferBeginInfo( *reinterpret_cast<CommandBufferBeginInfo const *>( &rhs ) )
    {
    }

    CommandBufferBeginInfo & operator=( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct, again via the layout-compatible reinterpret_cast.
    CommandBufferBeginInfo & operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CommandBufferBeginInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this so calls can be composed fluently. sType has no setter.
    VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setFlags( CommandBufferUsageFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setPInheritanceInfo( const CommandBufferInheritanceInfo * pInheritanceInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pInheritanceInfo = pInheritanceInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (by reference and by pointer), so the wrapper can be passed
    // directly to the C API without copying.
    operator VkCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCommandBufferBeginInfo *>( this );
    }

    operator VkCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCommandBufferBeginInfo *>( this );
    }

    operator VkCommandBufferBeginInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCommandBufferBeginInfo *>( this );
    }

    operator VkCommandBufferBeginInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCommandBufferBeginInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members (including sType/pNext) as a tuple of references; used for generic comparison below.
    std::tuple<StructureType const &, const void * const &, CommandBufferUsageFlags const &, const CommandBufferInheritanceInfo * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, pInheritanceInfo );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CommandBufferBeginInfo const & ) const = default;
#else
    // Pre-C++20 fallback: memberwise equality. Pointer members compare by address, not by pointee contents.
    bool operator==( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pInheritanceInfo == rhs.pInheritanceInfo );
#  endif
    }

    bool operator!=( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkCommandBufferBeginInfo in declaration order; do not reorder.
    StructureType                        sType            = StructureType::eCommandBufferBeginInfo;
    const void *                         pNext            = {};
    CommandBufferUsageFlags              flags            = {};
    const CommandBufferInheritanceInfo * pInheritanceInfo = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and later: map the C type to this C++ wrapper for generic code.
  template <>
  struct CppType<VkCommandBufferBeginInfo>
  {
    using Type = CommandBufferBeginInfo;
  };
#endif

  // Map the StructureType enumerant back to this wrapper (used by pNext-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::eCommandBufferBeginInfo>
  {
    using Type = CommandBufferBeginInfo;
  };

  // wrapper struct for struct VkCommandBufferInheritanceConditionalRenderingInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferInheritanceConditionalRenderingInfoEXT.html
  // NOTE: the member order and types mirror the C struct exactly; the reinterpret_casts below rely on this layout compatibility.
  struct CommandBufferInheritanceConditionalRenderingInfoEXT
  {
    using NativeType = VkCommandBufferInheritanceConditionalRenderingInfoEXT;

    // allowDuplicate: this struct may not appear more than once in a pNext chain.
    // structureType: the VkStructureType value the sType member is fixed to.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Constructor with one defaulted argument per member; sType is set implicitly via the member initializer.
    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( Bool32       conditionalRenderingEnable_ = {},
                                                                              const void * pNext_                      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , conditionalRenderingEnable{ conditionalRenderingEnable_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      CommandBufferInheritanceConditionalRenderingInfoEXT( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because the wrapper is layout-compatible with the C type.
    CommandBufferInheritanceConditionalRenderingInfoEXT( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : CommandBufferInheritanceConditionalRenderingInfoEXT( *reinterpret_cast<CommandBufferInheritanceConditionalRenderingInfoEXT const *>( &rhs ) )
    {
    }

    CommandBufferInheritanceConditionalRenderingInfoEXT &
      operator=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct, again via the layout-compatible reinterpret_cast.
    CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CommandBufferInheritanceConditionalRenderingInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this so calls can be composed fluently. sType has no setter.
    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT &
      setConditionalRenderingEnable( Bool32 conditionalRenderingEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      conditionalRenderingEnable = conditionalRenderingEnable_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (by reference and by pointer), so the wrapper can be passed
    // directly to the C API without copying.
    operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCommandBufferInheritanceConditionalRenderingInfoEXT *>( this );
    }

    operator VkCommandBufferInheritanceConditionalRenderingInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT *>( this );
    }

    operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCommandBufferInheritanceConditionalRenderingInfoEXT *>( this );
    }

    operator VkCommandBufferInheritanceConditionalRenderingInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members (including sType/pNext) as a tuple of references; used for generic comparison below.
    std::tuple<StructureType const &, const void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, conditionalRenderingEnable );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CommandBufferInheritanceConditionalRenderingInfoEXT const & ) const = default;
#else
    // Pre-C++20 fallback: memberwise equality. Note pNext is compared by pointer value, not by chain contents.
    bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conditionalRenderingEnable == rhs.conditionalRenderingEnable );
#  endif
    }

    bool operator!=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct in declaration order; do not reorder.
    StructureType sType                      = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
    const void *  pNext                      = {};
    Bool32        conditionalRenderingEnable = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and later: map the C type to this C++ wrapper for generic code.
  template <>
  struct CppType<VkCommandBufferInheritanceConditionalRenderingInfoEXT>
  {
    using Type = CommandBufferInheritanceConditionalRenderingInfoEXT;
  };
#endif

  // Map the StructureType enumerant back to this wrapper (used by pNext-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT>
  {
    using Type = CommandBufferInheritanceConditionalRenderingInfoEXT;
  };

  // wrapper struct for struct VkCommandBufferInheritanceRenderPassTransformInfoQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferInheritanceRenderPassTransformInfoQCOM.html
  struct CommandBufferInheritanceRenderPassTransformInfoQCOM
  {
    using NativeType = VkCommandBufferInheritanceRenderPassTransformInfoQCOM;

    // Structure-chain traits: this struct may appear at most once in a pNext chain,
    // and always carries the sType value below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor. sType is fixed by its member initializer; members are initialized
    // in declaration order regardless of the order written in this initializer list.
    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM( SurfaceTransformFlagBitsKHR transform_  = SurfaceTransformFlagBitsKHR::eIdentity,
                                                                              Rect2D                      renderArea_ = {},
                                                                              const void *                pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , transform{ transform_ }
      , renderArea{ renderArea_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      CommandBufferInheritanceRenderPassTransformInfoQCOM( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; valid because this wrapper is layout-compatible
    // with VkCommandBufferInheritanceRenderPassTransformInfoQCOM.
    CommandBufferInheritanceRenderPassTransformInfoQCOM( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : CommandBufferInheritanceRenderPassTransformInfoQCOM( *reinterpret_cast<CommandBufferInheritanceRenderPassTransformInfoQCOM const *>( &rhs ) )
    {
    }

    CommandBufferInheritanceRenderPassTransformInfoQCOM &
      operator=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible reinterpret).
    CommandBufferInheritanceRenderPassTransformInfoQCOM & operator=( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CommandBufferInheritanceRenderPassTransformInfoQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & setTransform( SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
    {
      transform = transform_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & setRenderArea( Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
    {
      renderArea = renderArea_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-copy views of this object as the native C struct (layout-compatible).
    operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCommandBufferInheritanceRenderPassTransformInfoQCOM *>( this );
    }

    operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCommandBufferInheritanceRenderPassTransformInfoQCOM *>( this );
    }

    operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCommandBufferInheritanceRenderPassTransformInfoQCOM *>( this );
    }

    operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCommandBufferInheritanceRenderPassTransformInfoQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for generic comparison/hashing.
    std::tuple<StructureType const &, const void * const &, SurfaceTransformFlagBitsKHR const &, Rect2D const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, transform, renderArea );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CommandBufferInheritanceRenderPassTransformInfoQCOM const & ) const = default;
#else
    // Memberwise equality; the pNext pointer is compared by address, not by chain contents.
    bool operator==( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ) && ( renderArea == rhs.renderArea );
#  endif
    }

    bool operator!=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType               sType      = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM;
    const void *                pNext      = {};
    SurfaceTransformFlagBitsKHR transform  = SurfaceTransformFlagBitsKHR::eIdentity;
    Rect2D                      renderArea = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Map the native C type back to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkCommandBufferInheritanceRenderPassTransformInfoQCOM>
  {
    using Type = CommandBufferInheritanceRenderPassTransformInfoQCOM;
  };
#endif

  // Map the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM>
  {
    using Type = CommandBufferInheritanceRenderPassTransformInfoQCOM;
  };

  // wrapper struct for struct VkCommandBufferInheritanceRenderingInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferInheritanceRenderingInfo.html
  struct CommandBufferInheritanceRenderingInfo
  {
    using NativeType = VkCommandBufferInheritanceRenderingInfo;

    // Structure-chain traits: this struct may appear at most once in a pNext chain,
    // and always carries the sType value below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCommandBufferInheritanceRenderingInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor. sType is fixed by its member initializer; members are initialized
    // in declaration order regardless of the order written in this initializer list.
    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderingInfo( RenderingFlags      flags_                   = {},
                                                                uint32_t            viewMask_                = {},
                                                                uint32_t            colorAttachmentCount_    = {},
                                                                const Format *      pColorAttachmentFormats_ = {},
                                                                Format              depthAttachmentFormat_   = Format::eUndefined,
                                                                Format              stencilAttachmentFormat_ = Format::eUndefined,
                                                                SampleCountFlagBits rasterizationSamples_    = SampleCountFlagBits::e1,
                                                                const void *        pNext_                   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , viewMask{ viewMask_ }
      , colorAttachmentCount{ colorAttachmentCount_ }
      , pColorAttachmentFormats{ pColorAttachmentFormats_ }
      , depthAttachmentFormat{ depthAttachmentFormat_ }
      , stencilAttachmentFormat{ stencilAttachmentFormat_ }
      , rasterizationSamples{ rasterizationSamples_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderingInfo( CommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; valid because this wrapper is layout-compatible
    // with VkCommandBufferInheritanceRenderingInfo.
    CommandBufferInheritanceRenderingInfo( VkCommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : CommandBufferInheritanceRenderingInfo( *reinterpret_cast<CommandBufferInheritanceRenderingInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced constructor: derives colorAttachmentCount and pColorAttachmentFormats from the
    // array proxy. Only the pointer is stored — the proxied array is not copied, so it must
    // stay alive for as long as this struct is used.
    CommandBufferInheritanceRenderingInfo( RenderingFlags                                flags_,
                                           uint32_t                                      viewMask_,
                                           ArrayProxyNoTemporaries<const Format> const & colorAttachmentFormats_,
                                           Format                                        depthAttachmentFormat_   = Format::eUndefined,
                                           Format                                        stencilAttachmentFormat_ = Format::eUndefined,
                                           SampleCountFlagBits                           rasterizationSamples_    = SampleCountFlagBits::e1,
                                           const void *                                  pNext_                   = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , viewMask( viewMask_ )
      , colorAttachmentCount( static_cast<uint32_t>( colorAttachmentFormats_.size() ) )
      , pColorAttachmentFormats( colorAttachmentFormats_.data() )
      , depthAttachmentFormat( depthAttachmentFormat_ )
      , stencilAttachmentFormat( stencilAttachmentFormat_ )
      , rasterizationSamples( rasterizationSamples_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    CommandBufferInheritanceRenderingInfo & operator=( CommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible reinterpret).
    CommandBufferInheritanceRenderingInfo & operator=( VkCommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CommandBufferInheritanceRenderingInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setFlags( RenderingFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
    {
      viewMask = viewMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount = colorAttachmentCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setPColorAttachmentFormats( const Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
    {
      pColorAttachmentFormats = pColorAttachmentFormats_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both colorAttachmentCount and pColorAttachmentFormats from the proxy in one call.
    CommandBufferInheritanceRenderingInfo &
      setColorAttachmentFormats( ArrayProxyNoTemporaries<const Format> const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount    = static_cast<uint32_t>( colorAttachmentFormats_.size() );
      pColorAttachmentFormats = colorAttachmentFormats_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setDepthAttachmentFormat( Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      depthAttachmentFormat = depthAttachmentFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setStencilAttachmentFormat( Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilAttachmentFormat = stencilAttachmentFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setRasterizationSamples( SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
    {
      rasterizationSamples = rasterizationSamples_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-copy views of this object as the native C struct (layout-compatible).
    operator VkCommandBufferInheritanceRenderingInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCommandBufferInheritanceRenderingInfo *>( this );
    }

    operator VkCommandBufferInheritanceRenderingInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCommandBufferInheritanceRenderingInfo *>( this );
    }

    operator VkCommandBufferInheritanceRenderingInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCommandBufferInheritanceRenderingInfo *>( this );
    }

    operator VkCommandBufferInheritanceRenderingInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCommandBufferInheritanceRenderingInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for generic comparison/hashing.
    std::tuple<StructureType const &,
               const void * const &,
               RenderingFlags const &,
               uint32_t const &,
               uint32_t const &,
               const Format * const &,
               Format const &,
               Format const &,
               SampleCountFlagBits const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(
        sType, pNext, flags, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat, rasterizationSamples );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CommandBufferInheritanceRenderingInfo const & ) const = default;
#else
    // Memberwise equality; pointer members (pNext, pColorAttachmentFormats) are compared
    // by address, not by pointed-to contents.
    bool operator==( CommandBufferInheritanceRenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( viewMask == rhs.viewMask ) &&
             ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) &&
             ( depthAttachmentFormat == rhs.depthAttachmentFormat ) && ( stencilAttachmentFormat == rhs.stencilAttachmentFormat ) &&
             ( rasterizationSamples == rhs.rasterizationSamples );
#  endif
    }

    bool operator!=( CommandBufferInheritanceRenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType       sType                   = StructureType::eCommandBufferInheritanceRenderingInfo;
    const void *        pNext                   = {};
    RenderingFlags      flags                   = {};
    uint32_t            viewMask                = {};
    uint32_t            colorAttachmentCount    = {};
    const Format *      pColorAttachmentFormats = {};
    Format              depthAttachmentFormat   = Format::eUndefined;
    Format              stencilAttachmentFormat = Format::eUndefined;
    SampleCountFlagBits rasterizationSamples    = SampleCountFlagBits::e1;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Map the native C type back to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkCommandBufferInheritanceRenderingInfo>
  {
    using Type = CommandBufferInheritanceRenderingInfo;
  };
#endif

  // Map the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eCommandBufferInheritanceRenderingInfo>
  {
    using Type = CommandBufferInheritanceRenderingInfo;
  };

  // Backward-compatible alias for the KHR-suffixed (extension-era) name of this struct.
  using CommandBufferInheritanceRenderingInfoKHR = CommandBufferInheritanceRenderingInfo;

  // wrapper struct for struct VkViewport, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkViewport.html
  struct Viewport
  {
    using NativeType = VkViewport;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: every field is zero-initialized unless specified.
    VULKAN_HPP_CONSTEXPR
      Viewport( float x_ = {}, float y_ = {}, float width_ = {}, float height_ = {}, float minDepth_ = {}, float maxDepth_ = {} ) VULKAN_HPP_NOEXCEPT
      : x( x_ )
      , y( y_ )
      , width( width_ )
      , height( height_ )
      , minDepth( minDepth_ )
      , maxDepth( maxDepth_ )
    {
    }

    VULKAN_HPP_CONSTEXPR Viewport( Viewport const & other ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; this wrapper is layout-compatible with VkViewport.
    Viewport( VkViewport const & native ) VULKAN_HPP_NOEXCEPT : Viewport( *reinterpret_cast<Viewport const *>( &native ) ) {}

    Viewport & operator=( Viewport const & other ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpret).
    Viewport & operator=( VkViewport const & native ) VULKAN_HPP_NOEXCEPT
    {
      return *this = *reinterpret_cast<Viewport const *>( &native );
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each stores the value and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 Viewport & setX( float x_ ) VULKAN_HPP_NOEXCEPT
    {
      this->x = x_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Viewport & setY( float y_ ) VULKAN_HPP_NOEXCEPT
    {
      this->y = y_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Viewport & setWidth( float width_ ) VULKAN_HPP_NOEXCEPT
    {
      this->width = width_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Viewport & setHeight( float height_ ) VULKAN_HPP_NOEXCEPT
    {
      this->height = height_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Viewport & setMinDepth( float minDepth_ ) VULKAN_HPP_NOEXCEPT
    {
      this->minDepth = minDepth_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Viewport & setMaxDepth( float maxDepth_ ) VULKAN_HPP_NOEXCEPT
    {
      this->maxDepth = maxDepth_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-copy reinterpretations of this object as the native C type.
    operator VkViewport const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkViewport *>( this );
    }

    operator VkViewport *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkViewport *>( this );
    }

    operator VkViewport const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkViewport *>( this );
    }

    operator VkViewport &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkViewport *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of references for generic comparison/hashing.
    std::tuple<float const &, float const &, float const &, float const &, float const &, float const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( x, y, width, height, minDepth, maxDepth );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Viewport const & ) const = default;
#else
    // Memberwise float equality (exact comparison, as for the underlying C struct).
    bool operator==( Viewport const & other ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return reflect() == other.reflect();
#  else
      return ( x == other.x ) && ( y == other.y ) && ( width == other.width ) && ( height == other.height ) && ( minDepth == other.minDepth ) &&
             ( maxDepth == other.maxDepth );
#  endif
    }

    bool operator!=( Viewport const & other ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == other );
    }
#endif

  public:
    float x        = {};
    float y        = {};
    float width    = {};
    float height   = {};
    float minDepth = {};
    float maxDepth = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Map the native C type back to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkViewport>
  {
    using Type = Viewport;
  };
#endif

  // wrapper struct for struct VkCommandBufferInheritanceViewportScissorInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferInheritanceViewportScissorInfoNV.html
  struct CommandBufferInheritanceViewportScissorInfoNV
  {
    using NativeType = VkCommandBufferInheritanceViewportScissorInfoNV;

    // Structure-chain traits: this struct may appear at most once in a pNext chain,
    // and always carries the sType value below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCommandBufferInheritanceViewportScissorInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor. Only the pViewportDepths_ pointer is stored; the pointed-to
    // Viewport array is not copied. sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV( Bool32           viewportScissor2D_  = {},
                                                                        uint32_t         viewportDepthCount_ = {},
                                                                        const Viewport * pViewportDepths_    = {},
                                                                        const void *     pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , viewportScissor2D{ viewportScissor2D_ }
      , viewportDepthCount{ viewportDepthCount_ }
      , pViewportDepths{ pViewportDepths_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      CommandBufferInheritanceViewportScissorInfoNV( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; valid because this wrapper is layout-compatible
    // with VkCommandBufferInheritanceViewportScissorInfoNV.
    CommandBufferInheritanceViewportScissorInfoNV( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : CommandBufferInheritanceViewportScissorInfoNV( *reinterpret_cast<CommandBufferInheritanceViewportScissorInfoNV const *>( &rhs ) )
    {
    }

    CommandBufferInheritanceViewportScissorInfoNV & operator=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible reinterpret).
    CommandBufferInheritanceViewportScissorInfoNV & operator=( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CommandBufferInheritanceViewportScissorInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setViewportScissor2D( Bool32 viewportScissor2D_ ) VULKAN_HPP_NOEXCEPT
    {
      viewportScissor2D = viewportScissor2D_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setViewportDepthCount( uint32_t viewportDepthCount_ ) VULKAN_HPP_NOEXCEPT
    {
      viewportDepthCount = viewportDepthCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setPViewportDepths( const Viewport * pViewportDepths_ ) VULKAN_HPP_NOEXCEPT
    {
      pViewportDepths = pViewportDepths_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-copy views of this object as the native C struct (layout-compatible).
    operator VkCommandBufferInheritanceViewportScissorInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCommandBufferInheritanceViewportScissorInfoNV *>( this );
    }

    operator VkCommandBufferInheritanceViewportScissorInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCommandBufferInheritanceViewportScissorInfoNV *>( this );
    }

    operator VkCommandBufferInheritanceViewportScissorInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCommandBufferInheritanceViewportScissorInfoNV *>( this );
    }

    operator VkCommandBufferInheritanceViewportScissorInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCommandBufferInheritanceViewportScissorInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for generic comparison/hashing.
    std::tuple<StructureType const &, const void * const &, Bool32 const &, uint32_t const &, const Viewport * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, viewportScissor2D, viewportDepthCount, pViewportDepths );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CommandBufferInheritanceViewportScissorInfoNV const & ) const = default;
#else
    // Memberwise equality; pointer members (pNext, pViewportDepths) are compared by
    // address, not by pointed-to contents.
    bool operator==( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewportScissor2D == rhs.viewportScissor2D ) &&
             ( viewportDepthCount == rhs.viewportDepthCount ) && ( pViewportDepths == rhs.pViewportDepths );
#  endif
    }

    bool operator!=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType    sType              = StructureType::eCommandBufferInheritanceViewportScissorInfoNV;
    const void *     pNext              = {};
    Bool32           viewportScissor2D  = {};
    uint32_t         viewportDepthCount = {};
    const Viewport * pViewportDepths    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Map the native C type back to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkCommandBufferInheritanceViewportScissorInfoNV>
  {
    using Type = CommandBufferInheritanceViewportScissorInfoNV;
  };
#endif

  // Map the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eCommandBufferInheritanceViewportScissorInfoNV>
  {
    using Type = CommandBufferInheritanceViewportScissorInfoNV;
  };

  // wrapper struct for struct VkCommandBufferSubmitInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferSubmitInfo.html
  struct CommandBufferSubmitInfo
  {
    using NativeType = VkCommandBufferSubmitInfo;

    // Structure-chain traits: this struct may appear at most once in a pNext chain,
    // and always carries the sType value below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCommandBufferSubmitInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor. sType is fixed by its member initializer; members are initialized
    // in declaration order regardless of the order written in this initializer list.
    VULKAN_HPP_CONSTEXPR
      CommandBufferSubmitInfo( CommandBuffer commandBuffer_ = {}, uint32_t deviceMask_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , commandBuffer{ commandBuffer_ }
      , deviceMask{ deviceMask_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; valid because this wrapper is layout-compatible
    // with VkCommandBufferSubmitInfo.
    CommandBufferSubmitInfo( VkCommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : CommandBufferSubmitInfo( *reinterpret_cast<CommandBufferSubmitInfo const *>( &rhs ) )
    {
    }

    CommandBufferSubmitInfo & operator=( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible reinterpret).
    CommandBufferSubmitInfo & operator=( VkCommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CommandBufferSubmitInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setCommandBuffer( CommandBuffer commandBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      commandBuffer = commandBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceMask = deviceMask_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-copy views of this object as the native C struct (layout-compatible).
    operator VkCommandBufferSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCommandBufferSubmitInfo *>( this );
    }

    operator VkCommandBufferSubmitInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCommandBufferSubmitInfo *>( this );
    }

    operator VkCommandBufferSubmitInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCommandBufferSubmitInfo *>( this );
    }

    operator VkCommandBufferSubmitInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCommandBufferSubmitInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for generic comparison/hashing.
    std::tuple<StructureType const &, const void * const &, CommandBuffer const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, commandBuffer, deviceMask );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CommandBufferSubmitInfo const & ) const = default;
#else
    // Memberwise equality; the pNext pointer is compared by address, not by chain contents.
    bool operator==( CommandBufferSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandBuffer == rhs.commandBuffer ) && ( deviceMask == rhs.deviceMask );
#  endif
    }

    bool operator!=( CommandBufferSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType         = StructureType::eCommandBufferSubmitInfo;
    const void *  pNext         = {};
    CommandBuffer commandBuffer = {};
    uint32_t      deviceMask    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Map the native C type back to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkCommandBufferSubmitInfo>
  {
    using Type = CommandBufferSubmitInfo;
  };
#endif

  // Map the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eCommandBufferSubmitInfo>
  {
    using Type = CommandBufferSubmitInfo;
  };

  // Backward-compatible alias for the KHR-suffixed (extension-era) name of this struct.
  using CommandBufferSubmitInfoKHR = CommandBufferSubmitInfo;

  // wrapper struct for struct VkCommandPoolCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandPoolCreateInfo.html
  struct CommandPoolCreateInfo
  {
    using NativeType = VkCommandPoolCreateInfo;

    // Structure-chain traits: this struct may appear at most once in a pNext chain,
    // and always carries the sType value below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCommandPoolCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor. sType is fixed by its member initializer; members are initialized
    // in declaration order regardless of the order written in this initializer list.
    VULKAN_HPP_CONSTEXPR
      CommandPoolCreateInfo( CommandPoolCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , queueFamilyIndex{ queueFamilyIndex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; valid because this wrapper is layout-compatible
    // with VkCommandPoolCreateInfo.
    CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : CommandPoolCreateInfo( *reinterpret_cast<CommandPoolCreateInfo const *>( &rhs ) )
    {
    }

    CommandPoolCreateInfo & operator=( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible reinterpret).
    CommandPoolCreateInfo & operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CommandPoolCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setFlags( CommandPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndex = queueFamilyIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-copy views of this object as the native C struct (layout-compatible).
    operator VkCommandPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCommandPoolCreateInfo *>( this );
    }

    operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCommandPoolCreateInfo *>( this );
    }

    operator VkCommandPoolCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCommandPoolCreateInfo *>( this );
    }

    operator VkCommandPoolCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCommandPoolCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for generic comparison/hashing.
    std::tuple<StructureType const &, const void * const &, CommandPoolCreateFlags const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, queueFamilyIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CommandPoolCreateInfo const & ) const = default;
#else
    // Memberwise equality; the pNext pointer is compared by address, not by chain contents.
    bool operator==( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueFamilyIndex == rhs.queueFamilyIndex );
#  endif
    }

    bool operator!=( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType          sType            = StructureType::eCommandPoolCreateInfo;
    const void *           pNext            = {};
    CommandPoolCreateFlags flags            = {};
    uint32_t               queueFamilyIndex = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Map the native C type back to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkCommandPoolCreateInfo>
  {
    using Type = CommandPoolCreateInfo;
  };
#endif

  // Map the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eCommandPoolCreateInfo>
  {
    using Type = CommandPoolCreateInfo;
  };

  // wrapper struct for struct VkSpecializationMapEntry, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSpecializationMapEntry.html
  struct SpecializationMapEntry
  {
    using NativeType = VkSpecializationMapEntry;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // All members are value-initialized by default.
    VULKAN_HPP_CONSTEXPR SpecializationMapEntry( uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {} ) VULKAN_HPP_NOEXCEPT
      : constantID( constantID_ )
      , offset( offset_ )
      , size( size_ )
    {
    }

    VULKAN_HPP_CONSTEXPR SpecializationMapEntry( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible native struct.
    SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
      : SpecializationMapEntry( *reinterpret_cast<SpecializationMapEntry const *>( &rhs ) )
    {
    }

    SpecializationMapEntry & operator=( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible native struct.
    SpecializationMapEntry & operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SpecializationMapEntry const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters, returning *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setConstantID( uint32_t constantID_ ) VULKAN_HPP_NOEXCEPT
    {
      this->constantID = constantID_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
    {
      this->offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setSize( size_t size_ ) VULKAN_HPP_NOEXCEPT
    {
      this->size = size_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native struct (reference and pointer, const and non-const).
    operator VkSpecializationMapEntry const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSpecializationMapEntry const *>( this );
    }

    operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSpecializationMapEntry *>( this );
    }

    operator VkSpecializationMapEntry const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSpecializationMapEntry const *>( this );
    }

    operator VkSpecializationMapEntry *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSpecializationMapEntry *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of const references.
    std::tuple<uint32_t const &, uint32_t const &, size_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( constantID, offset, size );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SpecializationMapEntry const & ) const = default;
#else
    bool operator==( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return constantID == rhs.constantID && offset == rhs.offset && size == rhs.size;
#  endif
    }

    bool operator!=( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    uint32_t constantID = {};
    uint32_t offset     = {};
    size_t   size       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization mapping the native VkSpecializationMapEntry to its C++ wrapper type.
  template <>
  struct CppType<VkSpecializationMapEntry>
  {
    using Type = SpecializationMapEntry;
  };
#endif

  // wrapper struct for struct VkSpecializationInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSpecializationInfo.html
  struct SpecializationInfo
  {
    using NativeType = VkSpecializationInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SpecializationInfo( uint32_t                       mapEntryCount_ = {},
                                             const SpecializationMapEntry * pMapEntries_   = {},
                                             size_t                         dataSize_      = {},
                                             const void *                   pData_         = {} ) VULKAN_HPP_NOEXCEPT
      : mapEntryCount{ mapEntryCount_ }
      , pMapEntries{ pMapEntries_ }
      , dataSize{ dataSize_ }
      , pData{ pData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SpecializationInfo( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SpecializationInfo( *reinterpret_cast<SpecializationInfo const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    template <typename T>
    SpecializationInfo( ArrayProxyNoTemporaries<const SpecializationMapEntry> const & mapEntries_, ArrayProxyNoTemporaries<const T> const & data_ = {} )
      : mapEntryCount( static_cast<uint32_t>( mapEntries_.size() ) )
      , pMapEntries( mapEntries_.data() )
      , dataSize( data_.size() * sizeof( T ) )
      , pData( data_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SpecializationInfo & operator=( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    SpecializationInfo & operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SpecializationInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setMapEntryCount( uint32_t mapEntryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      mapEntryCount = mapEntryCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setPMapEntries( const SpecializationMapEntry * pMapEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      pMapEntries = pMapEntries_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SpecializationInfo & setMapEntries( ArrayProxyNoTemporaries<const SpecializationMapEntry> const & mapEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      mapEntryCount = static_cast<uint32_t>( mapEntries_.size() );
      pMapEntries   = mapEntries_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
    {
      dataSize = dataSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
    {
      pData = pData_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    template <typename T>
    SpecializationInfo & setData( ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
    {
      dataSize = data_.size() * sizeof( T );
      pData    = data_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    operator VkSpecializationInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSpecializationInfo *>( this );
    }

    operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSpecializationInfo *>( this );
    }

    operator VkSpecializationInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSpecializationInfo *>( this );
    }

    operator VkSpecializationInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSpecializationInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<uint32_t const &, const SpecializationMapEntry * const &, size_t const &, const void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( mapEntryCount, pMapEntries, dataSize, pData );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SpecializationInfo const & ) const = default;
#else
    bool operator==( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( mapEntryCount == rhs.mapEntryCount ) && ( pMapEntries == rhs.pMapEntries ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData );
#  endif
    }

    bool operator!=( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t                       mapEntryCount = {};
    const SpecializationMapEntry * pMapEntries   = {};
    size_t                         dataSize      = {};
    const void *                   pData         = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization mapping the native VkSpecializationInfo to its C++ wrapper type.
  template <>
  struct CppType<VkSpecializationInfo>
  {
    using Type = SpecializationInfo;
  };
#endif

  // wrapper struct for struct VkPipelineShaderStageCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineShaderStageCreateInfo.html
  struct PipelineShaderStageCreateInfo
  {
    using NativeType = VkPipelineShaderStageCreateInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineShaderStageCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // All members are value-initialized by default; note that pName defaults to nullptr.
    VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( PipelineShaderStageCreateFlags flags_               = {},
                                                        ShaderStageFlagBits            stage_               = ShaderStageFlagBits::eVertex,
                                                        ShaderModule                   module_              = {},
                                                        const char *                   pName_               = {},
                                                        const SpecializationInfo *     pSpecializationInfo_ = {},
                                                        const void *                   pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , stage{ stage_ }
      , module{ module_ }
      , pName{ pName_ }
      , pSpecializationInfo{ pSpecializationInfo_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible native struct.
    PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineShaderStageCreateInfo( *reinterpret_cast<PipelineShaderStageCreateInfo const *>( &rhs ) )
    {
    }

    PipelineShaderStageCreateInfo & operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible native struct.
    PipelineShaderStageCreateInfo & operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineShaderStageCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters, returning *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setFlags( PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setStage( ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT
    {
      stage = stage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setModule( ShaderModule module_ ) VULKAN_HPP_NOEXCEPT
    {
      module = module_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT
    {
      pName = pName_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPSpecializationInfo( const SpecializationInfo * pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pSpecializationInfo = pSpecializationInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native struct (reference and pointer, const and non-const).
    operator VkPipelineShaderStageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineShaderStageCreateInfo *>( this );
    }

    operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineShaderStageCreateInfo *>( this );
    }

    operator VkPipelineShaderStageCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineShaderStageCreateInfo *>( this );
    }

    operator VkPipelineShaderStageCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineShaderStageCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of const references.
    std::tuple<StructureType const &,
               const void * const &,
               PipelineShaderStageCreateFlags const &,
               ShaderStageFlagBits const &,
               ShaderModule const &,
               const char * const &,
               const SpecializationInfo * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, stage, module, pName, pSpecializationInfo );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Custom ordering: pName is compared by string content, not pointer identity.
    std::strong_ordering operator<=>( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
        return cmp;
      if ( auto cmp = stage <=> rhs.stage; cmp != 0 )
        return cmp;
      if ( auto cmp = module <=> rhs.module; cmp != 0 )
        return cmp;
      if ( pName != rhs.pName )
      {
        // pName defaults to nullptr; calling strcmp with a null pointer is undefined behavior.
        // Order a null pName before any non-null one, and only compare contents when both are set.
        if ( !pName || !rhs.pName )
          return !pName ? std::strong_ordering::less : std::strong_ordering::greater;
        if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 )
          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      }
      if ( auto cmp = pSpecializationInfo <=> rhs.pSpecializationInfo; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    bool operator==( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      // pName is compared by string content; guard against null pointers before strcmp
      // (equal pointers covers the both-null case, so strcmp is only reached with two non-null strings).
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && ( module == rhs.module ) &&
             ( ( pName == rhs.pName ) || ( pName && rhs.pName && ( strcmp( pName, rhs.pName ) == 0 ) ) ) &&
             ( pSpecializationInfo == rhs.pSpecializationInfo );
    }

    bool operator!=( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    StructureType                  sType               = StructureType::ePipelineShaderStageCreateInfo;
    const void *                   pNext               = {};
    PipelineShaderStageCreateFlags flags               = {};
    ShaderStageFlagBits            stage               = ShaderStageFlagBits::eVertex;
    ShaderModule                   module              = {};
    const char *                   pName               = {};
    const SpecializationInfo *     pSpecializationInfo = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization mapping the native VkPipelineShaderStageCreateInfo to its C++ wrapper type.
  template <>
  struct CppType<VkPipelineShaderStageCreateInfo>
  {
    using Type = PipelineShaderStageCreateInfo;
  };
#endif

  // Trait specialization mapping StructureType::ePipelineShaderStageCreateInfo to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePipelineShaderStageCreateInfo>
  {
    using Type = PipelineShaderStageCreateInfo;
  };

  // wrapper struct for struct VkComputePipelineCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkComputePipelineCreateInfo.html
  struct ComputePipelineCreateInfo
  {
    using NativeType = VkComputePipelineCreateInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eComputePipelineCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // All members are value-initialized by default.
    VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( PipelineCreateFlags           flags_              = {},
                                                    PipelineShaderStageCreateInfo stage_              = {},
                                                    PipelineLayout                layout_             = {},
                                                    Pipeline                      basePipelineHandle_ = {},
                                                    int32_t                       basePipelineIndex_  = {},
                                                    const void *                  pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , flags( flags_ )
      , stage( stage_ )
      , layout( layout_ )
      , basePipelineHandle( basePipelineHandle_ )
      , basePipelineIndex( basePipelineIndex_ )
    {
    }

    VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible native struct.
    ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ComputePipelineCreateInfo( *reinterpret_cast<ComputePipelineCreateInfo const *>( &rhs ) )
    {
    }

    ComputePipelineCreateInfo & operator=( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible native struct.
    ComputePipelineCreateInfo & operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ComputePipelineCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters, returning *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setFlags( PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      this->flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setStage( PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT
    {
      this->stage = stage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setLayout( PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
    {
      this->layout = layout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setBasePipelineHandle( Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
    {
      this->basePipelineHandle = basePipelineHandle_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      this->basePipelineIndex = basePipelineIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native struct (reference and pointer, const and non-const).
    operator VkComputePipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkComputePipelineCreateInfo const *>( this );
    }

    operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkComputePipelineCreateInfo *>( this );
    }

    operator VkComputePipelineCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkComputePipelineCreateInfo const *>( this );
    }

    operator VkComputePipelineCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkComputePipelineCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of const references.
    std::tuple<StructureType const &,
               const void * const &,
               PipelineCreateFlags const &,
               PipelineShaderStageCreateInfo const &,
               PipelineLayout const &,
               Pipeline const &,
               int32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, stage, layout, basePipelineHandle, basePipelineIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ComputePipelineCreateInfo const & ) const = default;
#else
    bool operator==( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return sType == rhs.sType && pNext == rhs.pNext && flags == rhs.flags && stage == rhs.stage && layout == rhs.layout &&
             basePipelineHandle == rhs.basePipelineHandle && basePipelineIndex == rhs.basePipelineIndex;
#  endif
    }

    bool operator!=( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType                 sType              = StructureType::eComputePipelineCreateInfo;
    const void *                  pNext              = {};
    PipelineCreateFlags           flags              = {};
    PipelineShaderStageCreateInfo stage              = {};
    PipelineLayout                layout             = {};
    Pipeline                      basePipelineHandle = {};
    int32_t                       basePipelineIndex  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization mapping the native VkComputePipelineCreateInfo to its C++ wrapper type.
  template <>
  struct CppType<VkComputePipelineCreateInfo>
  {
    using Type = ComputePipelineCreateInfo;
  };
#endif

  // Trait specialization mapping StructureType::eComputePipelineCreateInfo to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eComputePipelineCreateInfo>
  {
    using Type = ComputePipelineCreateInfo;
  };

  // wrapper struct for struct VkComputePipelineIndirectBufferInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkComputePipelineIndirectBufferInfoNV.html
  struct ComputePipelineIndirectBufferInfoNV
  {
    using NativeType = VkComputePipelineIndirectBufferInfoNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eComputePipelineIndirectBufferInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ComputePipelineIndirectBufferInfoNV( DeviceAddress deviceAddress_                      = {},
                                                              DeviceSize    size_                               = {},
                                                              DeviceAddress pipelineDeviceAddressCaptureReplay_ = {},
                                                              const void *  pNext_                              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , deviceAddress{ deviceAddress_ }
      , size{ size_ }
      , pipelineDeviceAddressCaptureReplay{ pipelineDeviceAddressCaptureReplay_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ComputePipelineIndirectBufferInfoNV( ComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ComputePipelineIndirectBufferInfoNV( VkComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ComputePipelineIndirectBufferInfoNV( *reinterpret_cast<ComputePipelineIndirectBufferInfoNV const *>( &rhs ) )
    {
    }

    ComputePipelineIndirectBufferInfoNV & operator=( ComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    ComputePipelineIndirectBufferInfoNV & operator=( VkComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ComputePipelineIndirectBufferInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setDeviceAddress( DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceAddress = deviceAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setSize( DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV &
      setPipelineDeviceAddressCaptureReplay( DeviceAddress pipelineDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineDeviceAddressCaptureReplay = pipelineDeviceAddressCaptureReplay_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkComputePipelineIndirectBufferInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkComputePipelineIndirectBufferInfoNV *>( this );
    }

    operator VkComputePipelineIndirectBufferInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkComputePipelineIndirectBufferInfoNV *>( this );
    }

    operator VkComputePipelineIndirectBufferInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkComputePipelineIndirectBufferInfoNV *>( this );
    }

    operator VkComputePipelineIndirectBufferInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkComputePipelineIndirectBufferInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, DeviceAddress const &, DeviceSize const &, DeviceAddress const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, deviceAddress, size, pipelineDeviceAddressCaptureReplay );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ComputePipelineIndirectBufferInfoNV const & ) const = default;
#else
    bool operator==( ComputePipelineIndirectBufferInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ) && ( size == rhs.size ) &&
             ( pipelineDeviceAddressCaptureReplay == rhs.pipelineDeviceAddressCaptureReplay );
#  endif
    }

    bool operator!=( ComputePipelineIndirectBufferInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                              = StructureType::eComputePipelineIndirectBufferInfoNV;
    const void *  pNext                              = {};
    DeviceAddress deviceAddress                      = {};
    DeviceSize    size                               = {};
    DeviceAddress pipelineDeviceAddressCaptureReplay = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization mapping the native VkComputePipelineIndirectBufferInfoNV to its C++ wrapper type.
  template <>
  struct CppType<VkComputePipelineIndirectBufferInfoNV>
  {
    using Type = ComputePipelineIndirectBufferInfoNV;
  };
#endif

  // Trait specialization mapping StructureType::eComputePipelineIndirectBufferInfoNV to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eComputePipelineIndirectBufferInfoNV>
  {
    using Type = ComputePipelineIndirectBufferInfoNV;
  };

  // wrapper struct for struct VkConditionalRenderingBeginInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkConditionalRenderingBeginInfoEXT.html
  struct ConditionalRenderingBeginInfoEXT
  {
    using NativeType = VkConditionalRenderingBeginInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eConditionalRenderingBeginInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( Buffer                       buffer_ = {},
                                                           DeviceSize                   offset_ = {},
                                                           ConditionalRenderingFlagsEXT flags_  = {},
                                                           const void *                 pNext_  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , buffer{ buffer_ }
      , offset{ offset_ }
      , flags{ flags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ConditionalRenderingBeginInfoEXT( *reinterpret_cast<ConditionalRenderingBeginInfoEXT const *>( &rhs ) )
    {
    }

    ConditionalRenderingBeginInfoEXT & operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    ConditionalRenderingBeginInfoEXT & operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ConditionalRenderingBeginInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setBuffer( Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setOffset( DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setFlags( ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkConditionalRenderingBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( this );
    }

    operator VkConditionalRenderingBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkConditionalRenderingBeginInfoEXT *>( this );
    }

    operator VkConditionalRenderingBeginInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( this );
    }

    operator VkConditionalRenderingBeginInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkConditionalRenderingBeginInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, Buffer const &, DeviceSize const &, ConditionalRenderingFlagsEXT const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, buffer, offset, flags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ConditionalRenderingBeginInfoEXT const & ) const = default;
#else
    bool operator==( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( flags == rhs.flags );
#  endif
    }

    bool operator!=( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                sType  = StructureType::eConditionalRenderingBeginInfoEXT;
    const void *                 pNext  = {};
    Buffer                       buffer = {};
    DeviceSize                   offset = {};
    ConditionalRenderingFlagsEXT flags  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization mapping the native VkConditionalRenderingBeginInfoEXT to its C++ wrapper type.
  template <>
  struct CppType<VkConditionalRenderingBeginInfoEXT>
  {
    using Type = ConditionalRenderingBeginInfoEXT;
  };
#endif

  // Trait specialization mapping StructureType::eConditionalRenderingBeginInfoEXT to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eConditionalRenderingBeginInfoEXT>
  {
    using Type = ConditionalRenderingBeginInfoEXT;
  };

  // wrapper struct for struct VkConformanceVersion, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkConformanceVersion.html
  // wrapper struct for struct VkConformanceVersion, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkConformanceVersion.html
  // Layout-compatible with the native VkConformanceVersion; the conversion operators
  // below rely on this to reinterpret_cast between the two types.
  struct ConformanceVersion
  {
    using NativeType = VkConformanceVersion;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; every component defaults to 0.
    // NOTE(review): the member names 'major'/'minor' can collide with the historical
    // macros from <sys/sysmacros.h> on some platforms - assumed handled by the includer.
    VULKAN_HPP_CONSTEXPR ConformanceVersion( uint8_t major_ = {}, uint8_t minor_ = {}, uint8_t subminor_ = {}, uint8_t patch_ = {} ) VULKAN_HPP_NOEXCEPT
      : major{ major_ }
      , minor{ minor_ }
      , subminor{ subminor_ }
      , patch{ patch_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ConformanceVersion( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible native C struct.
    ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT : ConformanceVersion( *reinterpret_cast<ConformanceVersion const *>( &rhs ) ) {}

    ConformanceVersion & operator=( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS && VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    // Assign from the layout-compatible native C struct.
    ConformanceVersion & operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ConformanceVersion const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this for fluent use.
    VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setMajor( uint8_t major_ ) VULKAN_HPP_NOEXCEPT
    {
      major = major_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setMinor( uint8_t minor_ ) VULKAN_HPP_NOEXCEPT
    {
      minor = minor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setSubminor( uint8_t subminor_ ) VULKAN_HPP_NOEXCEPT
    {
      subminor = subminor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setPatch( uint8_t patch_ ) VULKAN_HPP_NOEXCEPT
    {
      patch = patch_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const).
    operator VkConformanceVersion const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkConformanceVersion *>( this );
    }

    operator VkConformanceVersion &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkConformanceVersion *>( this );
    }

    operator VkConformanceVersion const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkConformanceVersion *>( this );
    }

    operator VkConformanceVersion *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkConformanceVersion *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: all members as a tuple of const references, in declaration order.
    std::tuple<uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( major, minor, subminor, patch );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ConformanceVersion const & ) const = default;
#else
    // Member-wise comparison when the spaceship operator is not available.
    bool operator==( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( major == rhs.major ) && ( minor == rhs.minor ) && ( subminor == rhs.subminor ) && ( patch == rhs.patch );
#  endif
    }

    bool operator!=( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint8_t major    = {};
    uint8_t minor    = {};
    uint8_t subminor = {};
    uint8_t patch    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Map the native C type to its C++ wrapper for template-based lookups (C++20 and up).
  template <>
  struct CppType<VkConformanceVersion>
  {
    using Type = ConformanceVersion;
  };
#endif
  // Backward-compatibility alias for the extension-suffixed name.
  using ConformanceVersionKHR = ConformanceVersion;

  // wrapper struct for struct VkConvertCooperativeVectorMatrixInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkConvertCooperativeVectorMatrixInfoNV.html
  struct ConvertCooperativeVectorMatrixInfoNV
  {
    using NativeType = VkConvertCooperativeVectorMatrixInfoNV;

    // This struct may appear at most once in a structure chain.
    static const bool                                  allowDuplicate = false;
    // The sType value the Vulkan API expects for this struct.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eConvertCooperativeVectorMatrixInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by the in-class initializer below and
    // is intentionally not a parameter.
    VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV( size_t                          srcSize_          = {},
                                                                  DeviceOrHostAddressConstKHR     srcData_          = {},
                                                                  size_t *                        pDstSize_         = {},
                                                                  DeviceOrHostAddressKHR          dstData_          = {},
                                                                  ComponentTypeKHR                srcComponentType_ = ComponentTypeKHR::eFloat16,
                                                                  ComponentTypeKHR                dstComponentType_ = ComponentTypeKHR::eFloat16,
                                                                  uint32_t                        numRows_          = {},
                                                                  uint32_t                        numColumns_       = {},
                                                                  CooperativeVectorMatrixLayoutNV srcLayout_ = CooperativeVectorMatrixLayoutNV::eRowMajor,
                                                                  size_t                          srcStride_ = {},
                                                                  CooperativeVectorMatrixLayoutNV dstLayout_ = CooperativeVectorMatrixLayoutNV::eRowMajor,
                                                                  size_t                          dstStride_ = {},
                                                                  const void *                    pNext_     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcSize{ srcSize_ }
      , srcData{ srcData_ }
      , pDstSize{ pDstSize_ }
      , dstData{ dstData_ }
      , srcComponentType{ srcComponentType_ }
      , dstComponentType{ dstComponentType_ }
      , numRows{ numRows_ }
      , numColumns{ numColumns_ }
      , srcLayout{ srcLayout_ }
      , srcStride{ srcStride_ }
      , dstLayout{ dstLayout_ }
      , dstStride{ dstStride_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV( ConvertCooperativeVectorMatrixInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible native C struct.
    ConvertCooperativeVectorMatrixInfoNV( VkConvertCooperativeVectorMatrixInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ConvertCooperativeVectorMatrixInfoNV( *reinterpret_cast<ConvertCooperativeVectorMatrixInfoNV const *>( &rhs ) )
    {
    }

    ConvertCooperativeVectorMatrixInfoNV & operator=( ConvertCooperativeVectorMatrixInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS && VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    // Assign from the layout-compatible native C struct.
    ConvertCooperativeVectorMatrixInfoNV & operator=( VkConvertCooperativeVectorMatrixInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ConvertCooperativeVectorMatrixInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this for fluent use.
    // No setter is generated for sType, which must keep its fixed value.
    VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setSrcSize( size_t srcSize_ ) VULKAN_HPP_NOEXCEPT
    {
      srcSize = srcSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setSrcData( DeviceOrHostAddressConstKHR const & srcData_ ) VULKAN_HPP_NOEXCEPT
    {
      srcData = srcData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setPDstSize( size_t * pDstSize_ ) VULKAN_HPP_NOEXCEPT
    {
      pDstSize = pDstSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setDstData( DeviceOrHostAddressKHR const & dstData_ ) VULKAN_HPP_NOEXCEPT
    {
      dstData = dstData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setSrcComponentType( ComponentTypeKHR srcComponentType_ ) VULKAN_HPP_NOEXCEPT
    {
      srcComponentType = srcComponentType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setDstComponentType( ComponentTypeKHR dstComponentType_ ) VULKAN_HPP_NOEXCEPT
    {
      dstComponentType = dstComponentType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setNumRows( uint32_t numRows_ ) VULKAN_HPP_NOEXCEPT
    {
      numRows = numRows_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setNumColumns( uint32_t numColumns_ ) VULKAN_HPP_NOEXCEPT
    {
      numColumns = numColumns_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setSrcLayout( CooperativeVectorMatrixLayoutNV srcLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      srcLayout = srcLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setSrcStride( size_t srcStride_ ) VULKAN_HPP_NOEXCEPT
    {
      srcStride = srcStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setDstLayout( CooperativeVectorMatrixLayoutNV dstLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      dstLayout = dstLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setDstStride( size_t dstStride_ ) VULKAN_HPP_NOEXCEPT
    {
      dstStride = dstStride_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const).
    operator VkConvertCooperativeVectorMatrixInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkConvertCooperativeVectorMatrixInfoNV *>( this );
    }

    operator VkConvertCooperativeVectorMatrixInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkConvertCooperativeVectorMatrixInfoNV *>( this );
    }

    operator VkConvertCooperativeVectorMatrixInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkConvertCooperativeVectorMatrixInfoNV *>( this );
    }

    operator VkConvertCooperativeVectorMatrixInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkConvertCooperativeVectorMatrixInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: all members as a tuple of const references, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               size_t const &,
               DeviceOrHostAddressConstKHR const &,
               size_t * const &,
               DeviceOrHostAddressKHR const &,
               ComponentTypeKHR const &,
               ComponentTypeKHR const &,
               uint32_t const &,
               uint32_t const &,
               CooperativeVectorMatrixLayoutNV const &,
               size_t const &,
               CooperativeVectorMatrixLayoutNV const &,
               size_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       srcSize,
                       srcData,
                       pDstSize,
                       dstData,
                       srcComponentType,
                       dstComponentType,
                       numRows,
                       numColumns,
                       srcLayout,
                       srcStride,
                       dstLayout,
                       dstStride );
    }
#endif

    // NOTE(review): unlike the sibling structs, no comparison operators are generated here,
    // presumably because srcData/dstData are DeviceOrHostAddress unions - verify against
    // the generator if comparisons are needed.
  public:
    StructureType                   sType            = StructureType::eConvertCooperativeVectorMatrixInfoNV;
    const void *                    pNext            = {};
    size_t                          srcSize          = {};
    DeviceOrHostAddressConstKHR     srcData          = {};
    size_t *                        pDstSize         = {};
    DeviceOrHostAddressKHR          dstData          = {};
    ComponentTypeKHR                srcComponentType = ComponentTypeKHR::eFloat16;
    ComponentTypeKHR                dstComponentType = ComponentTypeKHR::eFloat16;
    uint32_t                        numRows          = {};
    uint32_t                        numColumns       = {};
    CooperativeVectorMatrixLayoutNV srcLayout        = CooperativeVectorMatrixLayoutNV::eRowMajor;
    size_t                          srcStride        = {};
    CooperativeVectorMatrixLayoutNV dstLayout        = CooperativeVectorMatrixLayoutNV::eRowMajor;
    size_t                          dstStride        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Map the native C type to its C++ wrapper for template-based lookups (C++20 and up).
  template <>
  struct CppType<VkConvertCooperativeVectorMatrixInfoNV>
  {
    using Type = ConvertCooperativeVectorMatrixInfoNV;
  };
#endif

  // Map StructureType::eConvertCooperativeVectorMatrixInfoNV back to its wrapper struct,
  // used for sType-based lookups (e.g. in structure chains).
  template <>
  struct CppType<StructureType, StructureType::eConvertCooperativeVectorMatrixInfoNV>
  {
    using Type = ConvertCooperativeVectorMatrixInfoNV;
  };

  // wrapper struct for struct VkCooperativeMatrixFlexibleDimensionsPropertiesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCooperativeMatrixFlexibleDimensionsPropertiesNV.html
  struct CooperativeMatrixFlexibleDimensionsPropertiesNV
  {
    using NativeType = VkCooperativeMatrixFlexibleDimensionsPropertiesNV;

    // This struct may appear at most once in a structure chain.
    static const bool                                  allowDuplicate = false;
    // The sType value the Vulkan API expects for this struct.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCooperativeMatrixFlexibleDimensionsPropertiesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by the in-class initializer below.
    VULKAN_HPP_CONSTEXPR CooperativeMatrixFlexibleDimensionsPropertiesNV( uint32_t         MGranularity_           = {},
                                                                          uint32_t         NGranularity_           = {},
                                                                          uint32_t         KGranularity_           = {},
                                                                          ComponentTypeKHR AType_                  = ComponentTypeKHR::eFloat16,
                                                                          ComponentTypeKHR BType_                  = ComponentTypeKHR::eFloat16,
                                                                          ComponentTypeKHR CType_                  = ComponentTypeKHR::eFloat16,
                                                                          ComponentTypeKHR ResultType_             = ComponentTypeKHR::eFloat16,
                                                                          Bool32           saturatingAccumulation_ = {},
                                                                          ScopeKHR         scope_                  = ScopeKHR::eDevice,
                                                                          uint32_t         workgroupInvocations_   = {},
                                                                          void *           pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , MGranularity{ MGranularity_ }
      , NGranularity{ NGranularity_ }
      , KGranularity{ KGranularity_ }
      , AType{ AType_ }
      , BType{ BType_ }
      , CType{ CType_ }
      , ResultType{ ResultType_ }
      , saturatingAccumulation{ saturatingAccumulation_ }
      , scope{ scope_ }
      , workgroupInvocations{ workgroupInvocations_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      CooperativeMatrixFlexibleDimensionsPropertiesNV( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible native C struct.
    CooperativeMatrixFlexibleDimensionsPropertiesNV( VkCooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : CooperativeMatrixFlexibleDimensionsPropertiesNV( *reinterpret_cast<CooperativeMatrixFlexibleDimensionsPropertiesNV const *>( &rhs ) )
    {
    }

    CooperativeMatrixFlexibleDimensionsPropertiesNV & operator=( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS && VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    // Assign from the layout-compatible native C struct.
    CooperativeMatrixFlexibleDimensionsPropertiesNV & operator=( VkCooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CooperativeMatrixFlexibleDimensionsPropertiesNV const *>( &rhs );
      return *this;
    }

    // No setters are generated for this struct (note the non-const pNext);
    // presumably it is a returned-only properties struct - see the registry page above.

    // Implicit conversions to the native C type (reference and pointer, const and non-const).
    operator VkCooperativeMatrixFlexibleDimensionsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCooperativeMatrixFlexibleDimensionsPropertiesNV *>( this );
    }

    operator VkCooperativeMatrixFlexibleDimensionsPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCooperativeMatrixFlexibleDimensionsPropertiesNV *>( this );
    }

    operator VkCooperativeMatrixFlexibleDimensionsPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCooperativeMatrixFlexibleDimensionsPropertiesNV *>( this );
    }

    operator VkCooperativeMatrixFlexibleDimensionsPropertiesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCooperativeMatrixFlexibleDimensionsPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: all members as a tuple of const references, in declaration order.
    std::tuple<StructureType const &,
               void * const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               ComponentTypeKHR const &,
               ComponentTypeKHR const &,
               ComponentTypeKHR const &,
               ComponentTypeKHR const &,
               Bool32 const &,
               ScopeKHR const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(
        sType, pNext, MGranularity, NGranularity, KGranularity, AType, BType, CType, ResultType, saturatingAccumulation, scope, workgroupInvocations );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CooperativeMatrixFlexibleDimensionsPropertiesNV const & ) const = default;
#else
    // Member-wise comparison when the spaceship operator is not available.
    bool operator==( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( MGranularity == rhs.MGranularity ) && ( NGranularity == rhs.NGranularity ) &&
             ( KGranularity == rhs.KGranularity ) && ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) &&
             ( ResultType == rhs.ResultType ) && ( saturatingAccumulation == rhs.saturatingAccumulation ) && ( scope == rhs.scope ) &&
             ( workgroupInvocations == rhs.workgroupInvocations );
#  endif
    }

    bool operator!=( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType    sType                  = StructureType::eCooperativeMatrixFlexibleDimensionsPropertiesNV;
    void *           pNext                  = {};
    uint32_t         MGranularity           = {};
    uint32_t         NGranularity           = {};
    uint32_t         KGranularity           = {};
    ComponentTypeKHR AType                  = ComponentTypeKHR::eFloat16;
    ComponentTypeKHR BType                  = ComponentTypeKHR::eFloat16;
    ComponentTypeKHR CType                  = ComponentTypeKHR::eFloat16;
    ComponentTypeKHR ResultType             = ComponentTypeKHR::eFloat16;
    Bool32           saturatingAccumulation = {};
    ScopeKHR         scope                  = ScopeKHR::eDevice;
    uint32_t         workgroupInvocations   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Map the native C type to its C++ wrapper for template-based lookups (C++20 and up).
  template <>
  struct CppType<VkCooperativeMatrixFlexibleDimensionsPropertiesNV>
  {
    using Type = CooperativeMatrixFlexibleDimensionsPropertiesNV;
  };
#endif

  // Map StructureType::eCooperativeMatrixFlexibleDimensionsPropertiesNV back to its wrapper
  // struct, used for sType-based lookups (e.g. in structure chains).
  template <>
  struct CppType<StructureType, StructureType::eCooperativeMatrixFlexibleDimensionsPropertiesNV>
  {
    using Type = CooperativeMatrixFlexibleDimensionsPropertiesNV;
  };

  // wrapper struct for struct VkCooperativeMatrixPropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCooperativeMatrixPropertiesKHR.html
  struct CooperativeMatrixPropertiesKHR
  {
    using NativeType = VkCooperativeMatrixPropertiesKHR;

    // This struct may appear at most once in a structure chain.
    static const bool                                  allowDuplicate = false;
    // The sType value the Vulkan API expects for this struct.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCooperativeMatrixPropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by the in-class initializer below.
    VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesKHR( uint32_t         MSize_                  = {},
                                                         uint32_t         NSize_                  = {},
                                                         uint32_t         KSize_                  = {},
                                                         ComponentTypeKHR AType_                  = ComponentTypeKHR::eFloat16,
                                                         ComponentTypeKHR BType_                  = ComponentTypeKHR::eFloat16,
                                                         ComponentTypeKHR CType_                  = ComponentTypeKHR::eFloat16,
                                                         ComponentTypeKHR ResultType_             = ComponentTypeKHR::eFloat16,
                                                         Bool32           saturatingAccumulation_ = {},
                                                         ScopeKHR         scope_                  = ScopeKHR::eDevice,
                                                         void *           pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , MSize{ MSize_ }
      , NSize{ NSize_ }
      , KSize{ KSize_ }
      , AType{ AType_ }
      , BType{ BType_ }
      , CType{ CType_ }
      , ResultType{ ResultType_ }
      , saturatingAccumulation{ saturatingAccumulation_ }
      , scope{ scope_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesKHR( CooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible native C struct.
    CooperativeMatrixPropertiesKHR( VkCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : CooperativeMatrixPropertiesKHR( *reinterpret_cast<CooperativeMatrixPropertiesKHR const *>( &rhs ) )
    {
    }

    CooperativeMatrixPropertiesKHR & operator=( CooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS && VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    // Assign from the layout-compatible native C struct.
    CooperativeMatrixPropertiesKHR & operator=( VkCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CooperativeMatrixPropertiesKHR const *>( &rhs );
      return *this;
    }

    // No setters are generated for this struct (note the non-const pNext);
    // presumably it is a returned-only properties struct - see the registry page above.

    // Implicit conversions to the native C type (reference and pointer, const and non-const).
    operator VkCooperativeMatrixPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCooperativeMatrixPropertiesKHR *>( this );
    }

    operator VkCooperativeMatrixPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( this );
    }

    operator VkCooperativeMatrixPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCooperativeMatrixPropertiesKHR *>( this );
    }

    operator VkCooperativeMatrixPropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: all members as a tuple of const references, in declaration order.
    std::tuple<StructureType const &,
               void * const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               ComponentTypeKHR const &,
               ComponentTypeKHR const &,
               ComponentTypeKHR const &,
               ComponentTypeKHR const &,
               Bool32 const &,
               ScopeKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, MSize, NSize, KSize, AType, BType, CType, ResultType, saturatingAccumulation, scope );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CooperativeMatrixPropertiesKHR const & ) const = default;
#else
    // Member-wise comparison when the spaceship operator is not available.
    bool operator==( CooperativeMatrixPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( MSize == rhs.MSize ) && ( NSize == rhs.NSize ) && ( KSize == rhs.KSize ) &&
             ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) && ( ResultType == rhs.ResultType ) &&
             ( saturatingAccumulation == rhs.saturatingAccumulation ) && ( scope == rhs.scope );
#  endif
    }

    bool operator!=( CooperativeMatrixPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType    sType                  = StructureType::eCooperativeMatrixPropertiesKHR;
    void *           pNext                  = {};
    uint32_t         MSize                  = {};
    uint32_t         NSize                  = {};
    uint32_t         KSize                  = {};
    ComponentTypeKHR AType                  = ComponentTypeKHR::eFloat16;
    ComponentTypeKHR BType                  = ComponentTypeKHR::eFloat16;
    ComponentTypeKHR CType                  = ComponentTypeKHR::eFloat16;
    ComponentTypeKHR ResultType             = ComponentTypeKHR::eFloat16;
    Bool32           saturatingAccumulation = {};
    ScopeKHR         scope                  = ScopeKHR::eDevice;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Map the native C type to its C++ wrapper for template-based lookups (C++20 and up).
  template <>
  struct CppType<VkCooperativeMatrixPropertiesKHR>
  {
    using Type = CooperativeMatrixPropertiesKHR;
  };
#endif

  // Map StructureType::eCooperativeMatrixPropertiesKHR back to its wrapper struct,
  // used for sType-based lookups (e.g. in structure chains).
  template <>
  struct CppType<StructureType, StructureType::eCooperativeMatrixPropertiesKHR>
  {
    using Type = CooperativeMatrixPropertiesKHR;
  };

  // wrapper struct for struct VkCooperativeMatrixPropertiesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCooperativeMatrixPropertiesNV.html
  struct CooperativeMatrixPropertiesNV
  {
    using NativeType = VkCooperativeMatrixPropertiesNV;

    // This struct may appear at most once in a structure chain.
    static const bool                                  allowDuplicate = false;
    // The sType value the Vulkan API expects for this struct.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCooperativeMatrixPropertiesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by the in-class initializer below.
    // Unlike the KHR variant above, the NV component/scope enums default to their
    // zero value rather than eFloat16/eDevice.
    VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( uint32_t        MSize_ = {},
                                                        uint32_t        NSize_ = {},
                                                        uint32_t        KSize_ = {},
                                                        ComponentTypeNV AType_ = {},
                                                        ComponentTypeNV BType_ = {},
                                                        ComponentTypeNV CType_ = {},
                                                        ComponentTypeNV DType_ = {},
                                                        ScopeNV         scope_ = {},
                                                        void *          pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , MSize{ MSize_ }
      , NSize{ NSize_ }
      , KSize{ KSize_ }
      , AType{ AType_ }
      , BType{ BType_ }
      , CType{ CType_ }
      , DType{ DType_ }
      , scope{ scope_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible native C struct.
    CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : CooperativeMatrixPropertiesNV( *reinterpret_cast<CooperativeMatrixPropertiesNV const *>( &rhs ) )
    {
    }

    CooperativeMatrixPropertiesNV & operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS && VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    // Assign from the layout-compatible native C struct.
    CooperativeMatrixPropertiesNV & operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CooperativeMatrixPropertiesNV const *>( &rhs );
      return *this;
    }

    // No setters are generated for this struct (note the non-const pNext);
    // presumably it is a returned-only properties struct - see the registry page above.

    // Implicit conversions to the native C type (reference and pointer, const and non-const).
    operator VkCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCooperativeMatrixPropertiesNV *>( this );
    }

    operator VkCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( this );
    }

    operator VkCooperativeMatrixPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCooperativeMatrixPropertiesNV *>( this );
    }

    operator VkCooperativeMatrixPropertiesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: all members as a tuple of const references, in declaration order.
    std::tuple<StructureType const &,
               void * const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               ComponentTypeNV const &,
               ComponentTypeNV const &,
               ComponentTypeNV const &,
               ComponentTypeNV const &,
               ScopeNV const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, MSize, NSize, KSize, AType, BType, CType, DType, scope );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CooperativeMatrixPropertiesNV const & ) const = default;
#else
    // Member-wise comparison when the spaceship operator is not available.
    bool operator==( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( MSize == rhs.MSize ) && ( NSize == rhs.NSize ) && ( KSize == rhs.KSize ) &&
             ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) && ( DType == rhs.DType ) && ( scope == rhs.scope );
#  endif
    }

    bool operator!=( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType   sType = StructureType::eCooperativeMatrixPropertiesNV;
    void *          pNext = {};
    uint32_t        MSize = {};
    uint32_t        NSize = {};
    uint32_t        KSize = {};
    ComponentTypeNV AType = {};
    ComponentTypeNV BType = {};
    ComponentTypeNV CType = {};
    ComponentTypeNV DType = {};
    ScopeNV         scope = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Map the native C type to its C++ wrapper for template-based lookups (C++20 and up).
  template <>
  struct CppType<VkCooperativeMatrixPropertiesNV>
  {
    using Type = CooperativeMatrixPropertiesNV;
  };
#endif

  // Map StructureType::eCooperativeMatrixPropertiesNV back to its wrapper struct,
  // used for sType-based lookups (e.g. in structure chains).
  template <>
  struct CppType<StructureType, StructureType::eCooperativeMatrixPropertiesNV>
  {
    using Type = CooperativeMatrixPropertiesNV;
  };

  // wrapper struct for struct VkCooperativeVectorPropertiesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCooperativeVectorPropertiesNV.html
  // Layout-compatible C++ wrapper around VkCooperativeVectorPropertiesNV. The conversion
  // operators below reinterpret_cast between the two representations, so the member list must
  // stay in exactly this order with exactly these types.
  struct CooperativeVectorPropertiesNV
  {
    using NativeType = VkCooperativeVectorPropertiesNV;

    // Consumed by vulkan.hpp's structure-chain machinery: the expected sType value, and whether
    // this struct may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCooperativeVectorPropertiesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext comes last in the parameter list (with a nullptr default)
    // even though it is initialized first, matching the member declaration order below.
    VULKAN_HPP_CONSTEXPR CooperativeVectorPropertiesNV( ComponentTypeKHR inputType_            = ComponentTypeKHR::eFloat16,
                                                        ComponentTypeKHR inputInterpretation_  = ComponentTypeKHR::eFloat16,
                                                        ComponentTypeKHR matrixInterpretation_ = ComponentTypeKHR::eFloat16,
                                                        ComponentTypeKHR biasInterpretation_   = ComponentTypeKHR::eFloat16,
                                                        ComponentTypeKHR resultType_           = ComponentTypeKHR::eFloat16,
                                                        Bool32           transpose_            = {},
                                                        void *           pNext_                = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , inputType{ inputType_ }
      , inputInterpretation{ inputInterpretation_ }
      , matrixInterpretation{ matrixInterpretation_ }
      , biasInterpretation{ biasInterpretation_ }
      , resultType{ resultType_ }
      , transpose{ transpose_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CooperativeVectorPropertiesNV( CooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Implicit conversion from the native C struct; valid because of the layout compatibility
    // noted on the class.
    CooperativeVectorPropertiesNV( VkCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : CooperativeVectorPropertiesNV( *reinterpret_cast<CooperativeVectorPropertiesNV const *>( &rhs ) )
    {
    }

    CooperativeVectorPropertiesNV & operator=( CooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (available even when wrapper constructors are disabled).
    CooperativeVectorPropertiesNV & operator=( VkCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CooperativeVectorPropertiesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setInputType( ComponentTypeKHR inputType_ ) VULKAN_HPP_NOEXCEPT
    {
      inputType = inputType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setInputInterpretation( ComponentTypeKHR inputInterpretation_ ) VULKAN_HPP_NOEXCEPT
    {
      inputInterpretation = inputInterpretation_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setMatrixInterpretation( ComponentTypeKHR matrixInterpretation_ ) VULKAN_HPP_NOEXCEPT
    {
      matrixInterpretation = matrixInterpretation_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setBiasInterpretation( ComponentTypeKHR biasInterpretation_ ) VULKAN_HPP_NOEXCEPT
    {
      biasInterpretation = biasInterpretation_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setResultType( ComponentTypeKHR resultType_ ) VULKAN_HPP_NOEXCEPT
    {
      resultType = resultType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setTranspose( Bool32 transpose_ ) VULKAN_HPP_NOEXCEPT
    {
      transpose = transpose_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to/from the native C type, for passing this wrapper directly to
    // the C API (by reference or by pointer).
    operator VkCooperativeVectorPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCooperativeVectorPropertiesNV *>( this );
    }

    operator VkCooperativeVectorPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCooperativeVectorPropertiesNV *>( this );
    }

    operator VkCooperativeVectorPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCooperativeVectorPropertiesNV *>( this );
    }

    operator VkCooperativeVectorPropertiesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCooperativeVectorPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for generic comparison below.
    std::tuple<StructureType const &,
               void * const &,
               ComponentTypeKHR const &,
               ComponentTypeKHR const &,
               ComponentTypeKHR const &,
               ComponentTypeKHR const &,
               ComponentTypeKHR const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, inputType, inputInterpretation, matrixInterpretation, biasInterpretation, resultType, transpose );
    }
#endif

    // Memberwise equality: defaulted <=> when available, otherwise an explicit (or reflect-based)
    // member comparison. Note that pNext is compared as a raw pointer, not deeply.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CooperativeVectorPropertiesNV const & ) const = default;
#else
    bool operator==( CooperativeVectorPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( inputType == rhs.inputType ) && ( inputInterpretation == rhs.inputInterpretation ) &&
             ( matrixInterpretation == rhs.matrixInterpretation ) && ( biasInterpretation == rhs.biasInterpretation ) && ( resultType == rhs.resultType ) &&
             ( transpose == rhs.transpose );
#  endif
    }

    bool operator!=( CooperativeVectorPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkCooperativeVectorPropertiesNV; do not reorder.
    StructureType    sType                = StructureType::eCooperativeVectorPropertiesNV;
    void *           pNext                = {};
    ComponentTypeKHR inputType            = ComponentTypeKHR::eFloat16;
    ComponentTypeKHR inputInterpretation  = ComponentTypeKHR::eFloat16;
    ComponentTypeKHR matrixInterpretation = ComponentTypeKHR::eFloat16;
    ComponentTypeKHR biasInterpretation   = ComponentTypeKHR::eFloat16;
    ComponentTypeKHR resultType           = ComponentTypeKHR::eFloat16;
    Bool32           transpose            = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper (C++20 and up only).
  template <>
  struct CppType<VkCooperativeVectorPropertiesNV>
  {
    using Type = CooperativeVectorPropertiesNV;
  };
#endif

  // Maps the StructureType enum value to the wrapper type, e.g. for pNext-chain dispatch.
  template <>
  struct CppType<StructureType, StructureType::eCooperativeVectorPropertiesNV>
  {
    using Type = CooperativeVectorPropertiesNV;
  };

  // wrapper struct for struct VkCopyAccelerationStructureInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyAccelerationStructureInfoKHR.html
  // Layout-compatible with the C struct: the conversion operators reinterpret_cast between the
  // two representations, so the member list must stay in exactly this order.
  struct CopyAccelerationStructureInfoKHR
  {
    using NativeType = VkCopyAccelerationStructureInfoKHR;

    // Consumed by vulkan.hpp's structure-chain machinery.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCopyAccelerationStructureInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext is last in the parameter list but initialized first,
    // matching the member declaration order below.
    VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( AccelerationStructureKHR         src_   = {},
                                                           AccelerationStructureKHR         dst_   = {},
                                                           CopyAccelerationStructureModeKHR mode_  = CopyAccelerationStructureModeKHR::eClone,
                                                           const void *                     pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , src{ src_ }
      , dst{ dst_ }
      , mode{ mode_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Implicit conversion from the native C struct.
    CopyAccelerationStructureInfoKHR( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : CopyAccelerationStructureInfoKHR( *reinterpret_cast<CopyAccelerationStructureInfoKHR const *>( &rhs ) )
    {
    }

    CopyAccelerationStructureInfoKHR & operator=( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (available even when wrapper constructors are disabled).
    CopyAccelerationStructureInfoKHR & operator=( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CopyAccelerationStructureInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setSrc( AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT
    {
      src = src_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setDst( AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT
    {
      dst = dst_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setMode( CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
    {
      mode = mode_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to/from the native C type.
    operator VkCopyAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( this );
    }

    operator VkCopyAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyAccelerationStructureInfoKHR *>( this );
    }

    operator VkCopyAccelerationStructureInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( this );
    }

    operator VkCopyAccelerationStructureInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCopyAccelerationStructureInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for generic comparison below.
    std::tuple<StructureType const &,
               const void * const &,
               AccelerationStructureKHR const &,
               AccelerationStructureKHR const &,
               CopyAccelerationStructureModeKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, src, dst, mode );
    }
#endif

    // Memberwise equality; pNext is compared as a raw pointer, not deeply.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CopyAccelerationStructureInfoKHR const & ) const = default;
#else
    bool operator==( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( src == rhs.src ) && ( dst == rhs.dst ) && ( mode == rhs.mode );
#  endif
    }

    bool operator!=( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkCopyAccelerationStructureInfoKHR; do not reorder.
    StructureType                    sType = StructureType::eCopyAccelerationStructureInfoKHR;
    const void *                     pNext = {};
    AccelerationStructureKHR         src   = {};
    AccelerationStructureKHR         dst   = {};
    CopyAccelerationStructureModeKHR mode  = CopyAccelerationStructureModeKHR::eClone;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper (C++20 and up only).
  template <>
  struct CppType<VkCopyAccelerationStructureInfoKHR>
  {
    using Type = CopyAccelerationStructureInfoKHR;
  };
#endif

  // Maps the StructureType enum value to the wrapper type, e.g. for pNext-chain dispatch.
  template <>
  struct CppType<StructureType, StructureType::eCopyAccelerationStructureInfoKHR>
  {
    using Type = CopyAccelerationStructureInfoKHR;
  };

  // wrapper struct for struct VkCopyAccelerationStructureToMemoryInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyAccelerationStructureToMemoryInfoKHR.html
  // Layout-compatible with the C struct: the conversion operators reinterpret_cast between the
  // two representations, so the member list must stay in exactly this order.
  // NOTE(review): unlike the sibling wrappers, this struct has no operator==/!=/<=> —
  // presumably because dst is a DeviceOrHostAddressKHR (a union-style type with no meaningful
  // memberwise comparison); confirm against the generator if this matters.
  struct CopyAccelerationStructureToMemoryInfoKHR
  {
    using NativeType = VkCopyAccelerationStructureToMemoryInfoKHR;

    // Consumed by vulkan.hpp's structure-chain machinery.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCopyAccelerationStructureToMemoryInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // CONSTEXPR_14 (rather than CONSTEXPR) here — presumably required for initializing the
    // DeviceOrHostAddressKHR member; confirm against the generator.
    VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR( AccelerationStructureKHR         src_   = {},
                                                                      DeviceOrHostAddressKHR           dst_   = {},
                                                                      CopyAccelerationStructureModeKHR mode_  = CopyAccelerationStructureModeKHR::eClone,
                                                                      const void *                     pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , src{ src_ }
      , dst{ dst_ }
      , mode{ mode_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Implicit conversion from the native C struct.
    CopyAccelerationStructureToMemoryInfoKHR( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : CopyAccelerationStructureToMemoryInfoKHR( *reinterpret_cast<CopyAccelerationStructureToMemoryInfoKHR const *>( &rhs ) )
    {
    }

    CopyAccelerationStructureToMemoryInfoKHR & operator=( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (available even when wrapper constructors are disabled).
    CopyAccelerationStructureToMemoryInfoKHR & operator=( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CopyAccelerationStructureToMemoryInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setSrc( AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT
    {
      src = src_;
      return *this;
    }

    // Takes dst by const reference (not by value like the other setters) — it is a
    // DeviceOrHostAddressKHR union type.
    VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setDst( DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT
    {
      dst = dst_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setMode( CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
    {
      mode = mode_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to/from the native C type.
    operator VkCopyAccelerationStructureToMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( this );
    }

    operator VkCopyAccelerationStructureToMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyAccelerationStructureToMemoryInfoKHR *>( this );
    }

    operator VkCopyAccelerationStructureToMemoryInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( this );
    }

    operator VkCopyAccelerationStructureToMemoryInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCopyAccelerationStructureToMemoryInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members.
    std::tuple<StructureType const &,
               const void * const &,
               AccelerationStructureKHR const &,
               DeviceOrHostAddressKHR const &,
               CopyAccelerationStructureModeKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, src, dst, mode );
    }
#endif

  public:
    // Members mirror VkCopyAccelerationStructureToMemoryInfoKHR; do not reorder.
    StructureType                    sType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR;
    const void *                     pNext = {};
    AccelerationStructureKHR         src   = {};
    DeviceOrHostAddressKHR           dst   = {};
    CopyAccelerationStructureModeKHR mode  = CopyAccelerationStructureModeKHR::eClone;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper (C++20 and up only).
  template <>
  struct CppType<VkCopyAccelerationStructureToMemoryInfoKHR>
  {
    using Type = CopyAccelerationStructureToMemoryInfoKHR;
  };
#endif

  // Maps the StructureType enum value to the wrapper type, e.g. for pNext-chain dispatch.
  template <>
  struct CppType<StructureType, StructureType::eCopyAccelerationStructureToMemoryInfoKHR>
  {
    using Type = CopyAccelerationStructureToMemoryInfoKHR;
  };

  // wrapper struct for struct VkCopyBufferInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyBufferInfo2.html
  // Layout-compatible with the C struct: the conversion operators reinterpret_cast between the
  // two representations, so the member list must stay in exactly this order.
  struct CopyBufferInfo2
  {
    using NativeType = VkCopyBufferInfo2;

    // Consumed by vulkan.hpp's structure-chain machinery.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCopyBufferInfo2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor taking an explicit (count, pointer) region list. The struct does
    // not own pRegions_ — the caller must keep the array alive while this struct is in use.
    VULKAN_HPP_CONSTEXPR CopyBufferInfo2( Buffer              srcBuffer_   = {},
                                          Buffer              dstBuffer_   = {},
                                          uint32_t            regionCount_ = {},
                                          const BufferCopy2 * pRegions_    = {},
                                          const void *        pNext_       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcBuffer{ srcBuffer_ }
      , dstBuffer{ dstBuffer_ }
      , regionCount{ regionCount_ }
      , pRegions{ pRegions_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CopyBufferInfo2( CopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Implicit conversion from the native C struct.
    CopyBufferInfo2( VkCopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : CopyBufferInfo2( *reinterpret_cast<CopyBufferInfo2 const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode convenience constructor: derives regionCount/pRegions from an array proxy.
    // The proxy rejects temporaries, but the referenced storage must still outlive this struct.
    CopyBufferInfo2( Buffer srcBuffer_, Buffer dstBuffer_, ArrayProxyNoTemporaries<const BufferCopy2> const & regions_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), srcBuffer( srcBuffer_ ), dstBuffer( dstBuffer_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    CopyBufferInfo2 & operator=( CopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (available even when wrapper constructors are disabled).
    CopyBufferInfo2 & operator=( VkCopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CopyBufferInfo2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setSrcBuffer( Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      srcBuffer = srcBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setDstBuffer( Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      dstBuffer = dstBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = regionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setPRegions( const BufferCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      pRegions = pRegions_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets regionCount and pRegions together from one array proxy.
    CopyBufferInfo2 & setRegions( ArrayProxyNoTemporaries<const BufferCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = static_cast<uint32_t>( regions_.size() );
      pRegions    = regions_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to/from the native C type.
    operator VkCopyBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyBufferInfo2 *>( this );
    }

    operator VkCopyBufferInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyBufferInfo2 *>( this );
    }

    operator VkCopyBufferInfo2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCopyBufferInfo2 *>( this );
    }

    operator VkCopyBufferInfo2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCopyBufferInfo2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for generic comparison below.
    std::tuple<StructureType const &, const void * const &, Buffer const &, Buffer const &, uint32_t const &, const BufferCopy2 * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcBuffer, dstBuffer, regionCount, pRegions );
    }
#endif

    // Memberwise equality; pNext and pRegions are compared as raw pointers, not deeply.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CopyBufferInfo2 const & ) const = default;
#else
    bool operator==( CopyBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) && ( dstBuffer == rhs.dstBuffer ) &&
             ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
#  endif
    }

    bool operator!=( CopyBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkCopyBufferInfo2; do not reorder.
    StructureType       sType       = StructureType::eCopyBufferInfo2;
    const void *        pNext       = {};
    Buffer              srcBuffer   = {};
    Buffer              dstBuffer   = {};
    uint32_t            regionCount = {};
    const BufferCopy2 * pRegions    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper (C++20 and up only).
  template <>
  struct CppType<VkCopyBufferInfo2>
  {
    using Type = CopyBufferInfo2;
  };
#endif

  // Maps the StructureType enum value to the wrapper type, e.g. for pNext-chain dispatch.
  template <>
  struct CppType<StructureType, StructureType::eCopyBufferInfo2>
  {
    using Type = CopyBufferInfo2;
  };

  // The KHR extension name is an alias for the promoted core struct.
  using CopyBufferInfo2KHR = CopyBufferInfo2;

  // wrapper struct for struct VkCopyBufferToImageInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyBufferToImageInfo2.html
  // Layout-compatible with the C struct: the conversion operators reinterpret_cast between the
  // two representations, so the member list must stay in exactly this order.
  struct CopyBufferToImageInfo2
  {
    using NativeType = VkCopyBufferToImageInfo2;

    // Consumed by vulkan.hpp's structure-chain machinery.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCopyBufferToImageInfo2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor taking an explicit (count, pointer) region list. The struct does
    // not own pRegions_ — the caller must keep the array alive while this struct is in use.
    VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2( Buffer                   srcBuffer_      = {},
                                                 Image                    dstImage_       = {},
                                                 ImageLayout              dstImageLayout_ = ImageLayout::eUndefined,
                                                 uint32_t                 regionCount_    = {},
                                                 const BufferImageCopy2 * pRegions_       = {},
                                                 const void *             pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcBuffer{ srcBuffer_ }
      , dstImage{ dstImage_ }
      , dstImageLayout{ dstImageLayout_ }
      , regionCount{ regionCount_ }
      , pRegions{ pRegions_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2( CopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Implicit conversion from the native C struct.
    CopyBufferToImageInfo2( VkCopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : CopyBufferToImageInfo2( *reinterpret_cast<CopyBufferToImageInfo2 const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode convenience constructor: derives regionCount/pRegions from an array proxy.
    // The proxy rejects temporaries, but the referenced storage must still outlive this struct.
    CopyBufferToImageInfo2( Buffer                                                  srcBuffer_,
                            Image                                                   dstImage_,
                            ImageLayout                                             dstImageLayout_,
                            ArrayProxyNoTemporaries<const BufferImageCopy2> const & regions_,
                            const void *                                            pNext_ = nullptr )
      : pNext( pNext_ )
      , srcBuffer( srcBuffer_ )
      , dstImage( dstImage_ )
      , dstImageLayout( dstImageLayout_ )
      , regionCount( static_cast<uint32_t>( regions_.size() ) )
      , pRegions( regions_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    CopyBufferToImageInfo2 & operator=( CopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (available even when wrapper constructors are disabled).
    CopyBufferToImageInfo2 & operator=( VkCopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CopyBufferToImageInfo2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setSrcBuffer( Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      srcBuffer = srcBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setDstImage( Image dstImage_ ) VULKAN_HPP_NOEXCEPT
    {
      dstImage = dstImage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setDstImageLayout( ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      dstImageLayout = dstImageLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = regionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setPRegions( const BufferImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      pRegions = pRegions_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets regionCount and pRegions together from one array proxy.
    CopyBufferToImageInfo2 & setRegions( ArrayProxyNoTemporaries<const BufferImageCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = static_cast<uint32_t>( regions_.size() );
      pRegions    = regions_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to/from the native C type.
    operator VkCopyBufferToImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyBufferToImageInfo2 *>( this );
    }

    operator VkCopyBufferToImageInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyBufferToImageInfo2 *>( this );
    }

    operator VkCopyBufferToImageInfo2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCopyBufferToImageInfo2 *>( this );
    }

    operator VkCopyBufferToImageInfo2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCopyBufferToImageInfo2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for generic comparison below.
    std::
      tuple<StructureType const &, const void * const &, Buffer const &, Image const &, ImageLayout const &, uint32_t const &, const BufferImageCopy2 * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions );
    }
#endif

    // Memberwise equality; pNext and pRegions are compared as raw pointers, not deeply.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CopyBufferToImageInfo2 const & ) const = default;
#else
    bool operator==( CopyBufferToImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) && ( dstImage == rhs.dstImage ) &&
             ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
#  endif
    }

    bool operator!=( CopyBufferToImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkCopyBufferToImageInfo2; do not reorder.
    StructureType            sType          = StructureType::eCopyBufferToImageInfo2;
    const void *             pNext          = {};
    Buffer                   srcBuffer      = {};
    Image                    dstImage       = {};
    ImageLayout              dstImageLayout = ImageLayout::eUndefined;
    uint32_t                 regionCount    = {};
    const BufferImageCopy2 * pRegions       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper (C++20 and up only).
  template <>
  struct CppType<VkCopyBufferToImageInfo2>
  {
    using Type = CopyBufferToImageInfo2;
  };
#endif

  // Maps the StructureType enum value to the wrapper type, e.g. for pNext-chain dispatch.
  template <>
  struct CppType<StructureType, StructureType::eCopyBufferToImageInfo2>
  {
    using Type = CopyBufferToImageInfo2;
  };

  // The KHR extension name is an alias for the promoted core struct.
  using CopyBufferToImageInfo2KHR = CopyBufferToImageInfo2;

  // wrapper struct for struct VkCopyCommandTransformInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyCommandTransformInfoQCOM.html
  // Layout-compatible with the C struct: the conversion operators reinterpret_cast between the
  // two representations, so the member list must stay in exactly this order.
  struct CopyCommandTransformInfoQCOM
  {
    using NativeType = VkCopyCommandTransformInfoQCOM;

    // Consumed by vulkan.hpp's structure-chain machinery.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCopyCommandTransformInfoQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; transform defaults to the identity transform.
    VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM( SurfaceTransformFlagBitsKHR transform_ = SurfaceTransformFlagBitsKHR::eIdentity,
                                                       const void *                pNext_     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , transform{ transform_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Implicit conversion from the native C struct.
    CopyCommandTransformInfoQCOM( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : CopyCommandTransformInfoQCOM( *reinterpret_cast<CopyCommandTransformInfoQCOM const *>( &rhs ) )
    {
    }

    CopyCommandTransformInfoQCOM & operator=( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (available even when wrapper constructors are disabled).
    CopyCommandTransformInfoQCOM & operator=( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CopyCommandTransformInfoQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & setTransform( SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
    {
      transform = transform_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to/from the native C type.
    operator VkCopyCommandTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyCommandTransformInfoQCOM *>( this );
    }

    operator VkCopyCommandTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyCommandTransformInfoQCOM *>( this );
    }

    operator VkCopyCommandTransformInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCopyCommandTransformInfoQCOM *>( this );
    }

    operator VkCopyCommandTransformInfoQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCopyCommandTransformInfoQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for generic comparison below.
    std::tuple<StructureType const &, const void * const &, SurfaceTransformFlagBitsKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, transform );
    }
#endif

    // Memberwise equality; pNext is compared as a raw pointer, not deeply.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CopyCommandTransformInfoQCOM const & ) const = default;
#else
    bool operator==( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform );
#  endif
    }

    bool operator!=( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkCopyCommandTransformInfoQCOM; do not reorder.
    StructureType               sType     = StructureType::eCopyCommandTransformInfoQCOM;
    const void *                pNext     = {};
    SurfaceTransformFlagBitsKHR transform = SurfaceTransformFlagBitsKHR::eIdentity;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper (C++20 and up only).
  template <>
  struct CppType<VkCopyCommandTransformInfoQCOM>
  {
    using Type = CopyCommandTransformInfoQCOM;
  };
#endif

  // Maps the StructureType enum value to the wrapper type, e.g. for pNext-chain dispatch.
  template <>
  struct CppType<StructureType, StructureType::eCopyCommandTransformInfoQCOM>
  {
    using Type = CopyCommandTransformInfoQCOM;
  };

  // wrapper struct for struct VkCopyDescriptorSet, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyDescriptorSet.html
  struct CopyDescriptorSet
  {
    using NativeType = VkCopyDescriptorSet;

    // This structure type may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCopyDescriptorSet;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by its member initializer, so only pNext and the payload fields are parameters.
    VULKAN_HPP_CONSTEXPR CopyDescriptorSet( DescriptorSet srcSet_          = {},
                                            uint32_t      srcBinding_      = {},
                                            uint32_t      srcArrayElement_ = {},
                                            DescriptorSet dstSet_          = {},
                                            uint32_t      dstBinding_      = {},
                                            uint32_t      dstArrayElement_ = {},
                                            uint32_t      descriptorCount_ = {},
                                            const void *  pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcSet{ srcSet_ }
      , srcBinding{ srcBinding_ }
      , srcArrayElement{ srcArrayElement_ }
      , dstSet{ dstSet_ }
      , dstBinding{ dstBinding_ }
      , dstArrayElement{ dstArrayElement_ }
      , descriptorCount{ descriptorCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CopyDescriptorSet( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native C struct; relies on this wrapper being layout-identical to VkCopyDescriptorSet.
    CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT : CopyDescriptorSet( *reinterpret_cast<CopyDescriptorSet const *>( &rhs ) ) {}

    CopyDescriptorSet & operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct, again via the assumed layout-identity of wrapper and native type.
    CopyDescriptorSet & operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CopyDescriptorSet const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcSet( DescriptorSet srcSet_ ) VULKAN_HPP_NOEXCEPT
    {
      srcSet = srcSet_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcBinding( uint32_t srcBinding_ ) VULKAN_HPP_NOEXCEPT
    {
      srcBinding = srcBinding_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcArrayElement( uint32_t srcArrayElement_ ) VULKAN_HPP_NOEXCEPT
    {
      srcArrayElement = srcArrayElement_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstSet( DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
    {
      dstSet = dstSet_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
    {
      dstBinding = dstBinding_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
    {
      dstArrayElement = dstArrayElement_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorCount = descriptorCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type, so this wrapper can be passed directly to the C API.
    operator VkCopyDescriptorSet const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyDescriptorSet *>( this );
    }

    operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyDescriptorSet *>( this );
    }

    operator VkCopyDescriptorSet const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCopyDescriptorSet *>( this );
    }

    operator VkCopyDescriptorSet *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCopyDescriptorSet *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for member-wise comparison in operator== below.
    std::tuple<StructureType const &,
               const void * const &,
               DescriptorSet const &,
               uint32_t const &,
               uint32_t const &,
               DescriptorSet const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcSet, srcBinding, srcArrayElement, dstSet, dstBinding, dstArrayElement, descriptorCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CopyDescriptorSet const & ) const = default;
#else
    // Hand-rolled member-wise equality for toolchains without a defaulted operator<=>.
    bool operator==( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSet == rhs.srcSet ) && ( srcBinding == rhs.srcBinding ) &&
             ( srcArrayElement == rhs.srcArrayElement ) && ( dstSet == rhs.dstSet ) && ( dstBinding == rhs.dstBinding ) &&
             ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount );
#  endif
    }

    bool operator!=( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkCopyDescriptorSet in declaration order; do not reorder, the reinterpret_casts above depend on it.
    StructureType sType           = StructureType::eCopyDescriptorSet;
    const void *  pNext           = {};
    DescriptorSet srcSet          = {};
    uint32_t      srcBinding      = {};
    uint32_t      srcArrayElement = {};
    DescriptorSet dstSet          = {};
    uint32_t      dstBinding      = {};
    uint32_t      dstArrayElement = {};
    uint32_t      descriptorCount = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct back to its C++ wrapper type (specialization only provided for C++20 and later).
  template <>
  struct CppType<VkCopyDescriptorSet>
  {
    using Type = CopyDescriptorSet;
  };
#endif

  // Maps StructureType::eCopyDescriptorSet to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eCopyDescriptorSet>
  {
    using Type = CopyDescriptorSet;
  };

  // wrapper struct for struct VkImageCopy2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCopy2.html
  struct ImageCopy2
  {
    using NativeType = VkImageCopy2;

    // This structure type may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageCopy2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by its member initializer, so only pNext and the payload fields are parameters.
    VULKAN_HPP_CONSTEXPR ImageCopy2( ImageSubresourceLayers srcSubresource_ = {},
                                     Offset3D               srcOffset_      = {},
                                     ImageSubresourceLayers dstSubresource_ = {},
                                     Offset3D               dstOffset_      = {},
                                     Extent3D               extent_         = {},
                                     const void *           pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcSubresource{ srcSubresource_ }
      , srcOffset{ srcOffset_ }
      , dstSubresource{ dstSubresource_ }
      , dstOffset{ dstOffset_ }
      , extent{ extent_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageCopy2( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native C struct; relies on this wrapper being layout-identical to VkImageCopy2.
    ImageCopy2( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCopy2( *reinterpret_cast<ImageCopy2 const *>( &rhs ) ) {}

    ImageCopy2 & operator=( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct, again via the assumed layout-identity of wrapper and native type.
    ImageCopy2 & operator=( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageCopy2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setSrcSubresource( ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      srcSubresource = srcSubresource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setSrcOffset( Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      srcOffset = srcOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setDstSubresource( ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      dstSubresource = dstSubresource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setDstOffset( Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      dstOffset = dstOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setExtent( Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
    {
      extent = extent_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type, so this wrapper can be passed directly to the C API.
    operator VkImageCopy2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageCopy2 *>( this );
    }

    operator VkImageCopy2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageCopy2 *>( this );
    }

    operator VkImageCopy2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageCopy2 *>( this );
    }

    operator VkImageCopy2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageCopy2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for member-wise comparison in operator== below.
    std::tuple<StructureType const &,
               const void * const &,
               ImageSubresourceLayers const &,
               Offset3D const &,
               ImageSubresourceLayers const &,
               Offset3D const &,
               Extent3D const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcSubresource, srcOffset, dstSubresource, dstOffset, extent );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageCopy2 const & ) const = default;
#else
    // Hand-rolled member-wise equality for toolchains without a defaulted operator<=>.
    bool operator==( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) &&
             ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent );
#  endif
    }

    bool operator!=( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkImageCopy2 in declaration order; do not reorder, the reinterpret_casts above depend on it.
    StructureType          sType          = StructureType::eImageCopy2;
    const void *           pNext          = {};
    ImageSubresourceLayers srcSubresource = {};
    Offset3D               srcOffset      = {};
    ImageSubresourceLayers dstSubresource = {};
    Offset3D               dstOffset      = {};
    Extent3D               extent         = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct back to its C++ wrapper type (specialization only provided for C++20 and later).
  template <>
  struct CppType<VkImageCopy2>
  {
    using Type = ImageCopy2;
  };
#endif

  // Maps StructureType::eImageCopy2 to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eImageCopy2>
  {
    using Type = ImageCopy2;
  };

  // Backwards-compatible alias for the extension (KHR) spelling of this struct.
  using ImageCopy2KHR = ImageCopy2;

  // wrapper struct for struct VkCopyImageInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyImageInfo2.html
  struct CopyImageInfo2
  {
    using NativeType = VkCopyImageInfo2;

    // This structure type may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCopyImageInfo2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by its member initializer, so only pNext and the payload fields are parameters.
    VULKAN_HPP_CONSTEXPR CopyImageInfo2( Image              srcImage_       = {},
                                         ImageLayout        srcImageLayout_ = ImageLayout::eUndefined,
                                         Image              dstImage_       = {},
                                         ImageLayout        dstImageLayout_ = ImageLayout::eUndefined,
                                         uint32_t           regionCount_    = {},
                                         const ImageCopy2 * pRegions_       = {},
                                         const void *       pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcImage{ srcImage_ }
      , srcImageLayout{ srcImageLayout_ }
      , dstImage{ dstImage_ }
      , dstImageLayout{ dstImageLayout_ }
      , regionCount{ regionCount_ }
      , pRegions{ pRegions_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CopyImageInfo2( CopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native C struct; relies on this wrapper being layout-identical to VkCopyImageInfo2.
    CopyImageInfo2( VkCopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : CopyImageInfo2( *reinterpret_cast<CopyImageInfo2 const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives regionCount and pRegions from the proxy. Note that pRegions is a
    // non-owning pointer into the proxied array, which therefore must outlive any use of this struct.
    CopyImageInfo2( Image                                             srcImage_,
                    ImageLayout                                       srcImageLayout_,
                    Image                                             dstImage_,
                    ImageLayout                                       dstImageLayout_,
                    ArrayProxyNoTemporaries<const ImageCopy2> const & regions_,
                    const void *                                      pNext_ = nullptr )
      : pNext( pNext_ )
      , srcImage( srcImage_ )
      , srcImageLayout( srcImageLayout_ )
      , dstImage( dstImage_ )
      , dstImageLayout( dstImageLayout_ )
      , regionCount( static_cast<uint32_t>( regions_.size() ) )
      , pRegions( regions_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    CopyImageInfo2 & operator=( CopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct, again via the assumed layout-identity of wrapper and native type.
    CopyImageInfo2 & operator=( VkCopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CopyImageInfo2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setSrcImage( Image srcImage_ ) VULKAN_HPP_NOEXCEPT
    {
      srcImage = srcImage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setSrcImageLayout( ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      srcImageLayout = srcImageLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setDstImage( Image dstImage_ ) VULKAN_HPP_NOEXCEPT
    {
      dstImage = dstImage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setDstImageLayout( ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      dstImageLayout = dstImageLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = regionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setPRegions( const ImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      pRegions = pRegions_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets regionCount and pRegions together from one array proxy (non-owning, see above).
    CopyImageInfo2 & setRegions( ArrayProxyNoTemporaries<const ImageCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = static_cast<uint32_t>( regions_.size() );
      pRegions    = regions_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type, so this wrapper can be passed directly to the C API.
    operator VkCopyImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyImageInfo2 *>( this );
    }

    operator VkCopyImageInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyImageInfo2 *>( this );
    }

    operator VkCopyImageInfo2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCopyImageInfo2 *>( this );
    }

    operator VkCopyImageInfo2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCopyImageInfo2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for member-wise comparison in operator== below.
    std::tuple<StructureType const &,
               const void * const &,
               Image const &,
               ImageLayout const &,
               Image const &,
               ImageLayout const &,
               uint32_t const &,
               const ImageCopy2 * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CopyImageInfo2 const & ) const = default;
#else
    // Hand-rolled member-wise equality for toolchains without a defaulted operator<=>.
    // Note: pRegions is compared by pointer value, not by the pointed-to regions.
    bool operator==( CopyImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) &&
             ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
#  endif
    }

    bool operator!=( CopyImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkCopyImageInfo2 in declaration order; do not reorder, the reinterpret_casts above depend on it.
    StructureType      sType          = StructureType::eCopyImageInfo2;
    const void *       pNext          = {};
    Image              srcImage       = {};
    ImageLayout        srcImageLayout = ImageLayout::eUndefined;
    Image              dstImage       = {};
    ImageLayout        dstImageLayout = ImageLayout::eUndefined;
    uint32_t           regionCount    = {};
    const ImageCopy2 * pRegions       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct back to its C++ wrapper type (specialization only provided for C++20 and later).
  template <>
  struct CppType<VkCopyImageInfo2>
  {
    using Type = CopyImageInfo2;
  };
#endif

  // Maps StructureType::eCopyImageInfo2 to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eCopyImageInfo2>
  {
    using Type = CopyImageInfo2;
  };

  // Backwards-compatible alias for the extension (KHR) spelling of this struct.
  using CopyImageInfo2KHR = CopyImageInfo2;

  // wrapper struct for struct VkCopyImageToBufferInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyImageToBufferInfo2.html
  struct CopyImageToBufferInfo2
  {
    using NativeType = VkCopyImageToBufferInfo2;

    // This structure type may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCopyImageToBufferInfo2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by its member initializer, so only pNext and the payload fields are parameters.
    VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2( Image                    srcImage_       = {},
                                                 ImageLayout              srcImageLayout_ = ImageLayout::eUndefined,
                                                 Buffer                   dstBuffer_      = {},
                                                 uint32_t                 regionCount_    = {},
                                                 const BufferImageCopy2 * pRegions_       = {},
                                                 const void *             pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcImage{ srcImage_ }
      , srcImageLayout{ srcImageLayout_ }
      , dstBuffer{ dstBuffer_ }
      , regionCount{ regionCount_ }
      , pRegions{ pRegions_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2( CopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native C struct; relies on this wrapper being layout-identical to VkCopyImageToBufferInfo2.
    CopyImageToBufferInfo2( VkCopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : CopyImageToBufferInfo2( *reinterpret_cast<CopyImageToBufferInfo2 const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives regionCount and pRegions from the proxy. Note that pRegions is a
    // non-owning pointer into the proxied array, which therefore must outlive any use of this struct.
    CopyImageToBufferInfo2( Image                                                   srcImage_,
                            ImageLayout                                             srcImageLayout_,
                            Buffer                                                  dstBuffer_,
                            ArrayProxyNoTemporaries<const BufferImageCopy2> const & regions_,
                            const void *                                            pNext_ = nullptr )
      : pNext( pNext_ )
      , srcImage( srcImage_ )
      , srcImageLayout( srcImageLayout_ )
      , dstBuffer( dstBuffer_ )
      , regionCount( static_cast<uint32_t>( regions_.size() ) )
      , pRegions( regions_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    CopyImageToBufferInfo2 & operator=( CopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct, again via the assumed layout-identity of wrapper and native type.
    CopyImageToBufferInfo2 & operator=( VkCopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CopyImageToBufferInfo2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setSrcImage( Image srcImage_ ) VULKAN_HPP_NOEXCEPT
    {
      srcImage = srcImage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setSrcImageLayout( ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      srcImageLayout = srcImageLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setDstBuffer( Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      dstBuffer = dstBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = regionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setPRegions( const BufferImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      pRegions = pRegions_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets regionCount and pRegions together from one array proxy (non-owning, see above).
    CopyImageToBufferInfo2 & setRegions( ArrayProxyNoTemporaries<const BufferImageCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = static_cast<uint32_t>( regions_.size() );
      pRegions    = regions_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type, so this wrapper can be passed directly to the C API.
    operator VkCopyImageToBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyImageToBufferInfo2 *>( this );
    }

    operator VkCopyImageToBufferInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyImageToBufferInfo2 *>( this );
    }

    operator VkCopyImageToBufferInfo2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCopyImageToBufferInfo2 *>( this );
    }

    operator VkCopyImageToBufferInfo2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCopyImageToBufferInfo2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for member-wise comparison in operator== below.
    std::
      tuple<StructureType const &, const void * const &, Image const &, ImageLayout const &, Buffer const &, uint32_t const &, const BufferImageCopy2 * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CopyImageToBufferInfo2 const & ) const = default;
#else
    // Hand-rolled member-wise equality for toolchains without a defaulted operator<=>.
    // Note: pRegions is compared by pointer value, not by the pointed-to regions.
    bool operator==( CopyImageToBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) &&
             ( dstBuffer == rhs.dstBuffer ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
#  endif
    }

    bool operator!=( CopyImageToBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkCopyImageToBufferInfo2 in declaration order; do not reorder, the reinterpret_casts above depend on it.
    StructureType            sType          = StructureType::eCopyImageToBufferInfo2;
    const void *             pNext          = {};
    Image                    srcImage       = {};
    ImageLayout              srcImageLayout = ImageLayout::eUndefined;
    Buffer                   dstBuffer      = {};
    uint32_t                 regionCount    = {};
    const BufferImageCopy2 * pRegions       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct back to its C++ wrapper type (specialization only provided for C++20 and later).
  template <>
  struct CppType<VkCopyImageToBufferInfo2>
  {
    using Type = CopyImageToBufferInfo2;
  };
#endif

  // Maps StructureType::eCopyImageToBufferInfo2 to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eCopyImageToBufferInfo2>
  {
    using Type = CopyImageToBufferInfo2;
  };

  // Backwards-compatible alias for the extension (KHR) spelling of this struct.
  using CopyImageToBufferInfo2KHR = CopyImageToBufferInfo2;

  // wrapper struct for struct VkCopyImageToImageInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyImageToImageInfo.html
  struct CopyImageToImageInfo
  {
    using NativeType = VkCopyImageToImageInfo;

    // This structure type may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCopyImageToImageInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by its member initializer, so only pNext and the payload fields are parameters.
    VULKAN_HPP_CONSTEXPR CopyImageToImageInfo( HostImageCopyFlags flags_          = {},
                                               Image              srcImage_       = {},
                                               ImageLayout        srcImageLayout_ = ImageLayout::eUndefined,
                                               Image              dstImage_       = {},
                                               ImageLayout        dstImageLayout_ = ImageLayout::eUndefined,
                                               uint32_t           regionCount_    = {},
                                               const ImageCopy2 * pRegions_       = {},
                                               const void *       pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , srcImage{ srcImage_ }
      , srcImageLayout{ srcImageLayout_ }
      , dstImage{ dstImage_ }
      , dstImageLayout{ dstImageLayout_ }
      , regionCount{ regionCount_ }
      , pRegions{ pRegions_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CopyImageToImageInfo( CopyImageToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native C struct; relies on this wrapper being layout-identical to VkCopyImageToImageInfo.
    CopyImageToImageInfo( VkCopyImageToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : CopyImageToImageInfo( *reinterpret_cast<CopyImageToImageInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives regionCount and pRegions from the proxy. Note that pRegions is a
    // non-owning pointer into the proxied array, which therefore must outlive any use of this struct.
    CopyImageToImageInfo( HostImageCopyFlags                                flags_,
                          Image                                             srcImage_,
                          ImageLayout                                       srcImageLayout_,
                          Image                                             dstImage_,
                          ImageLayout                                       dstImageLayout_,
                          ArrayProxyNoTemporaries<const ImageCopy2> const & regions_,
                          const void *                                      pNext_ = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , srcImage( srcImage_ )
      , srcImageLayout( srcImageLayout_ )
      , dstImage( dstImage_ )
      , dstImageLayout( dstImageLayout_ )
      , regionCount( static_cast<uint32_t>( regions_.size() ) )
      , pRegions( regions_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    CopyImageToImageInfo & operator=( CopyImageToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct, again via the assumed layout-identity of wrapper and native type.
    CopyImageToImageInfo & operator=( VkCopyImageToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CopyImageToImageInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setFlags( HostImageCopyFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setSrcImage( Image srcImage_ ) VULKAN_HPP_NOEXCEPT
    {
      srcImage = srcImage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setSrcImageLayout( ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      srcImageLayout = srcImageLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setDstImage( Image dstImage_ ) VULKAN_HPP_NOEXCEPT
    {
      dstImage = dstImage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setDstImageLayout( ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      dstImageLayout = dstImageLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = regionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setPRegions( const ImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      pRegions = pRegions_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets regionCount and pRegions together from one array proxy (non-owning, see above).
    CopyImageToImageInfo & setRegions( ArrayProxyNoTemporaries<const ImageCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = static_cast<uint32_t>( regions_.size() );
      pRegions    = regions_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type, so this wrapper can be passed directly to the C API.
    operator VkCopyImageToImageInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyImageToImageInfo *>( this );
    }

    operator VkCopyImageToImageInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyImageToImageInfo *>( this );
    }

    operator VkCopyImageToImageInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCopyImageToImageInfo *>( this );
    }

    operator VkCopyImageToImageInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCopyImageToImageInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for member-wise comparison in operator== below.
    std::tuple<StructureType const &,
               const void * const &,
               HostImageCopyFlags const &,
               Image const &,
               ImageLayout const &,
               Image const &,
               ImageLayout const &,
               uint32_t const &,
               const ImageCopy2 * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CopyImageToImageInfo const & ) const = default;
#else
    // Hand-rolled member-wise equality for toolchains without a defaulted operator<=>.
    // Note: pRegions is compared by pointer value, not by the pointed-to regions.
    bool operator==( CopyImageToImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( srcImage == rhs.srcImage ) &&
             ( srcImageLayout == rhs.srcImageLayout ) && ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) &&
             ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
#  endif
    }

    bool operator!=( CopyImageToImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkCopyImageToImageInfo in declaration order; do not reorder, the reinterpret_casts above depend on it.
    StructureType      sType          = StructureType::eCopyImageToImageInfo;
    const void *       pNext          = {};
    HostImageCopyFlags flags          = {};
    Image              srcImage       = {};
    ImageLayout        srcImageLayout = ImageLayout::eUndefined;
    Image              dstImage       = {};
    ImageLayout        dstImageLayout = ImageLayout::eUndefined;
    uint32_t           regionCount    = {};
    const ImageCopy2 * pRegions       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper for template code (C++20 and up).
  template <>
  struct CppType<VkCopyImageToImageInfo>
  {
    using Type = CopyImageToImageInfo;
  };
#endif

  // Maps StructureType::eCopyImageToImageInfo back to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eCopyImageToImageInfo>
  {
    using Type = CopyImageToImageInfo;
  };

  // Alias with the EXT suffix, kept for backwards compatibility (presumably from the pre-promotion extension name).
  using CopyImageToImageInfoEXT = CopyImageToImageInfo;

  // wrapper struct for struct VkImageToMemoryCopy, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageToMemoryCopy.html
  struct ImageToMemoryCopy
  {
    using NativeType = VkImageToMemoryCopy;

    // structureType is the sType value this struct carries; allowDuplicate presumably controls whether it may repeat in a pNext chain — confirm against the chain-validation machinery.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageToMemoryCopy;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; note pNext_ is the last parameter although it is stored first.
    VULKAN_HPP_CONSTEXPR ImageToMemoryCopy( void *                 pHostPointer_      = {},
                                            uint32_t               memoryRowLength_   = {},
                                            uint32_t               memoryImageHeight_ = {},
                                            ImageSubresourceLayers imageSubresource_  = {},
                                            Offset3D               imageOffset_       = {},
                                            Extent3D               imageExtent_       = {},
                                            const void *           pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pHostPointer{ pHostPointer_ }
      , memoryRowLength{ memoryRowLength_ }
      , memoryImageHeight{ memoryImageHeight_ }
      , imageSubresource{ imageSubresource_ }
      , imageOffset{ imageOffset_ }
      , imageExtent{ imageExtent_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageToMemoryCopy( ImageToMemoryCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-compatible with VkImageToMemoryCopy.
    ImageToMemoryCopy( VkImageToMemoryCopy const & rhs ) VULKAN_HPP_NOEXCEPT : ImageToMemoryCopy( *reinterpret_cast<ImageToMemoryCopy const *>( &rhs ) ) {}

    ImageToMemoryCopy & operator=( ImageToMemoryCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (layout-compatible reinterpret_cast).
    ImageToMemoryCopy & operator=( VkImageToMemoryCopy const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageToMemoryCopy const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setPHostPointer( void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT
    {
      pHostPointer = pHostPointer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setMemoryRowLength( uint32_t memoryRowLength_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryRowLength = memoryRowLength_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setMemoryImageHeight( uint32_t memoryImageHeight_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryImageHeight = memoryImageHeight_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setImageSubresource( ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      imageSubresource = imageSubresource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setImageOffset( Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      imageOffset = imageOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setImageExtent( Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
    {
      imageExtent = imageExtent_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference/pointer conversions to the C struct; valid because the layouts match.
    operator VkImageToMemoryCopy const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageToMemoryCopy *>( this );
    }

    operator VkImageToMemoryCopy &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageToMemoryCopy *>( this );
    }

    operator VkImageToMemoryCopy const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageToMemoryCopy *>( this );
    }

    operator VkImageToMemoryCopy *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageToMemoryCopy *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references, for generic (reflection-style) code.
    std::tuple<StructureType const &,
               const void * const &,
               void * const &,
               uint32_t const &,
               uint32_t const &,
               ImageSubresourceLayers const &,
               Offset3D const &,
               Extent3D const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pHostPointer, memoryRowLength, memoryImageHeight, imageSubresource, imageOffset, imageExtent );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageToMemoryCopy const & ) const = default;
#else
    // Fallback for compilers without <=>. Note: pNext and pHostPointer are compared by pointer value.
    bool operator==( ImageToMemoryCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pHostPointer == rhs.pHostPointer ) && ( memoryRowLength == rhs.memoryRowLength ) &&
             ( memoryImageHeight == rhs.memoryImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) &&
             ( imageExtent == rhs.imageExtent );
#  endif
    }

    bool operator!=( ImageToMemoryCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types must exactly mirror VkImageToMemoryCopy so the reinterpret_cast conversions stay valid.
    StructureType          sType             = StructureType::eImageToMemoryCopy;
    const void *           pNext             = {};
    void *                 pHostPointer      = {};
    uint32_t               memoryRowLength   = {};
    uint32_t               memoryImageHeight = {};
    ImageSubresourceLayers imageSubresource  = {};
    Offset3D               imageOffset       = {};
    Extent3D               imageExtent       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper for template code (C++20 and up).
  template <>
  struct CppType<VkImageToMemoryCopy>
  {
    using Type = ImageToMemoryCopy;
  };
#endif

  // Maps StructureType::eImageToMemoryCopy back to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eImageToMemoryCopy>
  {
    using Type = ImageToMemoryCopy;
  };

  // Alias with the EXT suffix, kept for backwards compatibility (presumably from the pre-promotion extension name).
  using ImageToMemoryCopyEXT = ImageToMemoryCopy;

  // wrapper struct for struct VkCopyImageToMemoryInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyImageToMemoryInfo.html
  struct CopyImageToMemoryInfo
  {
    using NativeType = VkCopyImageToMemoryInfo;

    // structureType is the sType value this struct carries; allowDuplicate presumably controls whether it may repeat in a pNext chain — confirm against the chain-validation machinery.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCopyImageToMemoryInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; note pNext_ is the last parameter although it is stored first.
    VULKAN_HPP_CONSTEXPR CopyImageToMemoryInfo( HostImageCopyFlags        flags_          = {},
                                                Image                     srcImage_       = {},
                                                ImageLayout               srcImageLayout_ = ImageLayout::eUndefined,
                                                uint32_t                  regionCount_    = {},
                                                const ImageToMemoryCopy * pRegions_       = {},
                                                const void *              pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , srcImage{ srcImage_ }
      , srcImageLayout{ srcImageLayout_ }
      , regionCount{ regionCount_ }
      , pRegions{ pRegions_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CopyImageToMemoryInfo( CopyImageToMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-compatible with VkCopyImageToMemoryInfo.
    CopyImageToMemoryInfo( VkCopyImageToMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : CopyImageToMemoryInfo( *reinterpret_cast<CopyImageToMemoryInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives regionCount and pRegions from the array proxy.
    // The proxy is non-owning — the caller must keep the region array alive while this struct is in use.
    CopyImageToMemoryInfo( HostImageCopyFlags                                       flags_,
                           Image                                                    srcImage_,
                           ImageLayout                                              srcImageLayout_,
                           ArrayProxyNoTemporaries<const ImageToMemoryCopy> const & regions_,
                           const void *                                             pNext_ = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , srcImage( srcImage_ )
      , srcImageLayout( srcImageLayout_ )
      , regionCount( static_cast<uint32_t>( regions_.size() ) )
      , pRegions( regions_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    CopyImageToMemoryInfo & operator=( CopyImageToMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (layout-compatible reinterpret_cast).
    CopyImageToMemoryInfo & operator=( VkCopyImageToMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CopyImageToMemoryInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setFlags( HostImageCopyFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setSrcImage( Image srcImage_ ) VULKAN_HPP_NOEXCEPT
    {
      srcImage = srcImage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setSrcImageLayout( ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      srcImageLayout = srcImageLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = regionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setPRegions( const ImageToMemoryCopy * pRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      pRegions = pRegions_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets regionCount and pRegions together from a non-owning array proxy.
    CopyImageToMemoryInfo & setRegions( ArrayProxyNoTemporaries<const ImageToMemoryCopy> const & regions_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = static_cast<uint32_t>( regions_.size() );
      pRegions    = regions_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference/pointer conversions to the C struct; valid because the layouts match.
    operator VkCopyImageToMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyImageToMemoryInfo *>( this );
    }

    operator VkCopyImageToMemoryInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyImageToMemoryInfo *>( this );
    }

    operator VkCopyImageToMemoryInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCopyImageToMemoryInfo *>( this );
    }

    operator VkCopyImageToMemoryInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCopyImageToMemoryInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references, for generic (reflection-style) code.
    std::tuple<StructureType const &,
               const void * const &,
               HostImageCopyFlags const &,
               Image const &,
               ImageLayout const &,
               uint32_t const &,
               const ImageToMemoryCopy * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, srcImage, srcImageLayout, regionCount, pRegions );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CopyImageToMemoryInfo const & ) const = default;
#else
    // Fallback for compilers without <=>. Note: pNext and pRegions are compared by pointer value.
    bool operator==( CopyImageToMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( srcImage == rhs.srcImage ) &&
             ( srcImageLayout == rhs.srcImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
#  endif
    }

    bool operator!=( CopyImageToMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types must exactly mirror VkCopyImageToMemoryInfo so the reinterpret_cast conversions stay valid.
    StructureType             sType          = StructureType::eCopyImageToMemoryInfo;
    const void *              pNext          = {};
    HostImageCopyFlags        flags          = {};
    Image                     srcImage       = {};
    ImageLayout               srcImageLayout = ImageLayout::eUndefined;
    uint32_t                  regionCount    = {};
    const ImageToMemoryCopy * pRegions       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper for template code (C++20 and up).
  template <>
  struct CppType<VkCopyImageToMemoryInfo>
  {
    using Type = CopyImageToMemoryInfo;
  };
#endif

  // Maps StructureType::eCopyImageToMemoryInfo back to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eCopyImageToMemoryInfo>
  {
    using Type = CopyImageToMemoryInfo;
  };

  // Alias with the EXT suffix, kept for backwards compatibility (presumably from the pre-promotion extension name).
  using CopyImageToMemoryInfoEXT = CopyImageToMemoryInfo;

  // C++ wrapper around VkCopyMemoryIndirectCommandKHR (layout-identical with the C struct), see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMemoryIndirectCommandKHR.html
  struct CopyMemoryIndirectCommandKHR
  {
    using NativeType = VkCopyMemoryIndirectCommandKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; every field defaults to zero.
    VULKAN_HPP_CONSTEXPR
      CopyMemoryIndirectCommandKHR( DeviceAddress srcAddress_ = {}, DeviceAddress dstAddress_ = {}, DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
      : srcAddress( srcAddress_ )
      , dstAddress( dstAddress_ )
      , size( size_ )
    {
    }

    VULKAN_HPP_CONSTEXPR CopyMemoryIndirectCommandKHR( CopyMemoryIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct; the two types share one memory layout, so the cast is safe.
    CopyMemoryIndirectCommandKHR( VkCopyMemoryIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : CopyMemoryIndirectCommandKHR( *reinterpret_cast<CopyMemoryIndirectCommandKHR const *>( &rhs ) )
    {
    }

    CopyMemoryIndirectCommandKHR & operator=( CopyMemoryIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, again through the layout-compatible cast.
    CopyMemoryIndirectCommandKHR & operator=( VkCopyMemoryIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      return *this = *reinterpret_cast<CopyMemoryIndirectCommandKHR const *>( &rhs );
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per member.
    VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandKHR & setSrcAddress( DeviceAddress value ) VULKAN_HPP_NOEXCEPT
    {
      srcAddress = value;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandKHR & setDstAddress( DeviceAddress value ) VULKAN_HPP_NOEXCEPT
    {
      dstAddress = value;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandKHR & setSize( DeviceSize value ) VULKAN_HPP_NOEXCEPT
    {
      size = value;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reference and pointer conversions to the underlying C type.
    operator VkCopyMemoryIndirectCommandKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyMemoryIndirectCommandKHR *>( this );
    }

    operator VkCopyMemoryIndirectCommandKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyMemoryIndirectCommandKHR *>( this );
    }

    operator VkCopyMemoryIndirectCommandKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCopyMemoryIndirectCommandKHR *>( this );
    }

    operator VkCopyMemoryIndirectCommandKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCopyMemoryIndirectCommandKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members, exposed as a tuple of const references.
    std::tuple<DeviceAddress const &, DeviceAddress const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( srcAddress, dstAddress, size );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CopyMemoryIndirectCommandKHR const & ) const = default;
#else
    // Member-wise equality for compilers without <=>.
    bool operator==( CopyMemoryIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( srcAddress == rhs.srcAddress ) && ( dstAddress == rhs.dstAddress ) && ( size == rhs.size );
#  endif
    }

    bool operator!=( CopyMemoryIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    // Layout must exactly mirror VkCopyMemoryIndirectCommandKHR.
    DeviceAddress srcAddress = {};
    DeviceAddress dstAddress = {};
    DeviceSize    size       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper for template code (C++20 and up).
  template <>
  struct CppType<VkCopyMemoryIndirectCommandKHR>
  {
    using Type = CopyMemoryIndirectCommandKHR;
  };
#endif
  // Alias with the NV suffix, kept for backwards compatibility (presumably from the pre-promotion extension name).
  using CopyMemoryIndirectCommandNV = CopyMemoryIndirectCommandKHR;

  // wrapper struct for struct VkStridedDeviceAddressRangeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkStridedDeviceAddressRangeKHR.html
  // Plain data struct (no sType/pNext chain members).
  struct StridedDeviceAddressRangeKHR
  {
    using NativeType = VkStridedDeviceAddressRangeKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; every field defaults to zero.
    VULKAN_HPP_CONSTEXPR StridedDeviceAddressRangeKHR( DeviceAddress address_ = {}, DeviceSize size_ = {}, DeviceSize stride_ = {} ) VULKAN_HPP_NOEXCEPT
      : address{ address_ }
      , size{ size_ }
      , stride{ stride_ }
    {
    }

    VULKAN_HPP_CONSTEXPR StridedDeviceAddressRangeKHR( StridedDeviceAddressRangeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-compatible with VkStridedDeviceAddressRangeKHR.
    StridedDeviceAddressRangeKHR( VkStridedDeviceAddressRangeKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : StridedDeviceAddressRangeKHR( *reinterpret_cast<StridedDeviceAddressRangeKHR const *>( &rhs ) )
    {
    }

    StridedDeviceAddressRangeKHR & operator=( StridedDeviceAddressRangeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (layout-compatible reinterpret_cast).
    StridedDeviceAddressRangeKHR & operator=( VkStridedDeviceAddressRangeKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<StridedDeviceAddressRangeKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRangeKHR & setAddress( DeviceAddress address_ ) VULKAN_HPP_NOEXCEPT
    {
      address = address_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRangeKHR & setSize( DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRangeKHR & setStride( DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
    {
      stride = stride_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference/pointer conversions to the C struct; valid because the layouts match.
    operator VkStridedDeviceAddressRangeKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkStridedDeviceAddressRangeKHR *>( this );
    }

    operator VkStridedDeviceAddressRangeKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkStridedDeviceAddressRangeKHR *>( this );
    }

    operator VkStridedDeviceAddressRangeKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkStridedDeviceAddressRangeKHR *>( this );
    }

    operator VkStridedDeviceAddressRangeKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkStridedDeviceAddressRangeKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references, for generic (reflection-style) code.
    std::tuple<DeviceAddress const &, DeviceSize const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( address, size, stride );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( StridedDeviceAddressRangeKHR const & ) const = default;
#else
    // Fallback for compilers without <=>: member-wise equality.
    bool operator==( StridedDeviceAddressRangeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( address == rhs.address ) && ( size == rhs.size ) && ( stride == rhs.stride );
#  endif
    }

    bool operator!=( StridedDeviceAddressRangeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types must exactly mirror VkStridedDeviceAddressRangeKHR so the reinterpret_cast conversions stay valid.
    DeviceAddress address = {};
    DeviceSize    size    = {};
    DeviceSize    stride  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper for template code (C++20 and up).
  template <>
  struct CppType<VkStridedDeviceAddressRangeKHR>
  {
    using Type = StridedDeviceAddressRangeKHR;
  };
#endif

  // wrapper struct for struct VkCopyMemoryIndirectInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMemoryIndirectInfoKHR.html
  struct CopyMemoryIndirectInfoKHR
  {
    using NativeType = VkCopyMemoryIndirectInfoKHR;

    // structureType is the sType value this struct carries; allowDuplicate presumably controls whether it may repeat in a pNext chain — confirm against the chain-validation machinery.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCopyMemoryIndirectInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; note pNext_ is the last parameter although it is stored first.
    VULKAN_HPP_CONSTEXPR CopyMemoryIndirectInfoKHR( AddressCopyFlagsKHR          srcCopyFlags_     = {},
                                                    AddressCopyFlagsKHR          dstCopyFlags_     = {},
                                                    uint32_t                     copyCount_        = {},
                                                    StridedDeviceAddressRangeKHR copyAddressRange_ = {},
                                                    const void *                 pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcCopyFlags{ srcCopyFlags_ }
      , dstCopyFlags{ dstCopyFlags_ }
      , copyCount{ copyCount_ }
      , copyAddressRange{ copyAddressRange_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CopyMemoryIndirectInfoKHR( CopyMemoryIndirectInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-compatible with VkCopyMemoryIndirectInfoKHR.
    CopyMemoryIndirectInfoKHR( VkCopyMemoryIndirectInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : CopyMemoryIndirectInfoKHR( *reinterpret_cast<CopyMemoryIndirectInfoKHR const *>( &rhs ) )
    {
    }

    CopyMemoryIndirectInfoKHR & operator=( CopyMemoryIndirectInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (layout-compatible reinterpret_cast).
    CopyMemoryIndirectInfoKHR & operator=( VkCopyMemoryIndirectInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CopyMemoryIndirectInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectInfoKHR & setSrcCopyFlags( AddressCopyFlagsKHR srcCopyFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      srcCopyFlags = srcCopyFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectInfoKHR & setDstCopyFlags( AddressCopyFlagsKHR dstCopyFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      dstCopyFlags = dstCopyFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectInfoKHR & setCopyCount( uint32_t copyCount_ ) VULKAN_HPP_NOEXCEPT
    {
      copyCount = copyCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectInfoKHR & setCopyAddressRange( StridedDeviceAddressRangeKHR const & copyAddressRange_ ) VULKAN_HPP_NOEXCEPT
    {
      copyAddressRange = copyAddressRange_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference/pointer conversions to the C struct; valid because the layouts match.
    operator VkCopyMemoryIndirectInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyMemoryIndirectInfoKHR *>( this );
    }

    operator VkCopyMemoryIndirectInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyMemoryIndirectInfoKHR *>( this );
    }

    operator VkCopyMemoryIndirectInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCopyMemoryIndirectInfoKHR *>( this );
    }

    operator VkCopyMemoryIndirectInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCopyMemoryIndirectInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references, for generic (reflection-style) code.
    std::tuple<StructureType const &,
               const void * const &,
               AddressCopyFlagsKHR const &,
               AddressCopyFlagsKHR const &,
               uint32_t const &,
               StridedDeviceAddressRangeKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcCopyFlags, dstCopyFlags, copyCount, copyAddressRange );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CopyMemoryIndirectInfoKHR const & ) const = default;
#else
    // Fallback for compilers without <=>. Note: pNext is compared by pointer value; copyAddressRange uses its own operator==.
    bool operator==( CopyMemoryIndirectInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcCopyFlags == rhs.srcCopyFlags ) && ( dstCopyFlags == rhs.dstCopyFlags ) &&
             ( copyCount == rhs.copyCount ) && ( copyAddressRange == rhs.copyAddressRange );
#  endif
    }

    bool operator!=( CopyMemoryIndirectInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types must exactly mirror VkCopyMemoryIndirectInfoKHR so the reinterpret_cast conversions stay valid.
    StructureType                sType            = StructureType::eCopyMemoryIndirectInfoKHR;
    const void *                 pNext            = {};
    AddressCopyFlagsKHR          srcCopyFlags     = {};
    AddressCopyFlagsKHR          dstCopyFlags     = {};
    uint32_t                     copyCount        = {};
    StridedDeviceAddressRangeKHR copyAddressRange = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper for template code (C++20 and up).
  template <>
  struct CppType<VkCopyMemoryIndirectInfoKHR>
  {
    using Type = CopyMemoryIndirectInfoKHR;
  };
#endif

  // Maps StructureType::eCopyMemoryIndirectInfoKHR back to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eCopyMemoryIndirectInfoKHR>
  {
    using Type = CopyMemoryIndirectInfoKHR;
  };

  // wrapper struct for struct VkCopyMemoryToAccelerationStructureInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMemoryToAccelerationStructureInfoKHR.html
  // NOTE(review): unlike its siblings, this struct defines no ==/!=/<=> — presumably because DeviceOrHostAddressConstKHR is a union; confirm with the generator.
  struct CopyMemoryToAccelerationStructureInfoKHR
  {
    using NativeType = VkCopyMemoryToAccelerationStructureInfoKHR;

    // structureType is the sType value this struct carries; allowDuplicate presumably controls whether it may repeat in a pNext chain — confirm against the chain-validation machinery.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCopyMemoryToAccelerationStructureInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; note pNext_ is the last parameter although it is stored first.
    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR( DeviceOrHostAddressConstKHR      src_   = {},
                                                                      AccelerationStructureKHR         dst_   = {},
                                                                      CopyAccelerationStructureModeKHR mode_  = CopyAccelerationStructureModeKHR::eClone,
                                                                      const void *                     pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , src{ src_ }
      , dst{ dst_ }
      , mode{ mode_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-compatible with VkCopyMemoryToAccelerationStructureInfoKHR.
    CopyMemoryToAccelerationStructureInfoKHR( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : CopyMemoryToAccelerationStructureInfoKHR( *reinterpret_cast<CopyMemoryToAccelerationStructureInfoKHR const *>( &rhs ) )
    {
    }

    CopyMemoryToAccelerationStructureInfoKHR & operator=( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (layout-compatible reinterpret_cast).
    CopyMemoryToAccelerationStructureInfoKHR & operator=( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CopyMemoryToAccelerationStructureInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setSrc( DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT
    {
      src = src_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setDst( AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT
    {
      dst = dst_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setMode( CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
    {
      mode = mode_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference/pointer conversions to the C struct; valid because the layouts match.
    operator VkCopyMemoryToAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( this );
    }

    operator VkCopyMemoryToAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyMemoryToAccelerationStructureInfoKHR *>( this );
    }

    operator VkCopyMemoryToAccelerationStructureInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( this );
    }

    operator VkCopyMemoryToAccelerationStructureInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCopyMemoryToAccelerationStructureInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references, for generic (reflection-style) code.
    std::tuple<StructureType const &,
               const void * const &,
               DeviceOrHostAddressConstKHR const &,
               AccelerationStructureKHR const &,
               CopyAccelerationStructureModeKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, src, dst, mode );
    }
#endif

  public:
    // Member order and types must exactly mirror VkCopyMemoryToAccelerationStructureInfoKHR so the reinterpret_cast conversions stay valid.
    StructureType                    sType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR;
    const void *                     pNext = {};
    DeviceOrHostAddressConstKHR      src   = {};
    AccelerationStructureKHR         dst   = {};
    CopyAccelerationStructureModeKHR mode  = CopyAccelerationStructureModeKHR::eClone;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper for template code (C++20 and up).
  template <>
  struct CppType<VkCopyMemoryToAccelerationStructureInfoKHR>
  {
    using Type = CopyMemoryToAccelerationStructureInfoKHR;
  };
#endif

  // Maps StructureType::eCopyMemoryToAccelerationStructureInfoKHR back to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eCopyMemoryToAccelerationStructureInfoKHR>
  {
    using Type = CopyMemoryToAccelerationStructureInfoKHR;
  };

  // wrapper struct for struct VkCopyMemoryToImageIndirectCommandKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMemoryToImageIndirectCommandKHR.html
  struct CopyMemoryToImageIndirectCommandKHR
  {
    // The native C struct this wrapper mirrors; all conversions below reinterpret_cast between the
    // two types, relying on identical memory layout.
    using NativeType = VkCopyMemoryToImageIndirectCommandKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every member defaults to zero-initialization.
    VULKAN_HPP_CONSTEXPR CopyMemoryToImageIndirectCommandKHR( DeviceAddress          srcAddress_        = {},
                                                              uint32_t               bufferRowLength_   = {},
                                                              uint32_t               bufferImageHeight_ = {},
                                                              ImageSubresourceLayers imageSubresource_  = {},
                                                              Offset3D               imageOffset_       = {},
                                                              Extent3D               imageExtent_       = {} ) VULKAN_HPP_NOEXCEPT
      : srcAddress{ srcAddress_ }
      , bufferRowLength{ bufferRowLength_ }
      , bufferImageHeight{ bufferImageHeight_ }
      , imageSubresource{ imageSubresource_ }
      , imageOffset{ imageOffset_ }
      , imageExtent{ imageExtent_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CopyMemoryToImageIndirectCommandKHR( CopyMemoryToImageIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native struct: delegates to the copy constructor through a
    // reinterpret_cast, valid because wrapper and native struct share the same layout.
    CopyMemoryToImageIndirectCommandKHR( VkCopyMemoryToImageIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : CopyMemoryToImageIndirectCommandKHR( *reinterpret_cast<CopyMemoryToImageIndirectCommandKHR const *>( &rhs ) )
    {
    }

    CopyMemoryToImageIndirectCommandKHR & operator=( CopyMemoryToImageIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native struct, again via the layout-compatible reinterpret_cast.
    CopyMemoryToImageIndirectCommandKHR & operator=( VkCopyMemoryToImageIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CopyMemoryToImageIndirectCommandKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent per-member setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandKHR & setSrcAddress( DeviceAddress srcAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAddress = srcAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandKHR & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferRowLength = bufferRowLength_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandKHR & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferImageHeight = bufferImageHeight_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandKHR & setImageSubresource( ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      imageSubresource = imageSubresource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandKHR & setImageOffset( Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      imageOffset = imageOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandKHR & setImageExtent( Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
    {
      imageExtent = imageExtent_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to (references/pointers of) the native struct, so the wrapper can be
    // passed directly to the C API.
    operator VkCopyMemoryToImageIndirectCommandKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyMemoryToImageIndirectCommandKHR *>( this );
    }

    operator VkCopyMemoryToImageIndirectCommandKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyMemoryToImageIndirectCommandKHR *>( this );
    }

    operator VkCopyMemoryToImageIndirectCommandKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCopyMemoryToImageIndirectCommandKHR *>( this );
    }

    operator VkCopyMemoryToImageIndirectCommandKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCopyMemoryToImageIndirectCommandKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, in declaration order; used by the reflection-based
    // operator== below and for generic member-wise access.
    std::tuple<DeviceAddress const &, uint32_t const &, uint32_t const &, ImageSubresourceLayers const &, Offset3D const &, Extent3D const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( srcAddress, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CopyMemoryToImageIndirectCommandKHR const & ) const = default;
#else
    // Member-wise equality (manual fallback when defaulted <=> is unavailable).
    bool operator==( CopyMemoryToImageIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( srcAddress == rhs.srcAddress ) && ( bufferRowLength == rhs.bufferRowLength ) && ( bufferImageHeight == rhs.bufferImageHeight ) &&
             ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && ( imageExtent == rhs.imageExtent );
#  endif
    }

    bool operator!=( CopyMemoryToImageIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    DeviceAddress          srcAddress        = {};
    uint32_t               bufferRowLength   = {};
    uint32_t               bufferImageHeight = {};
    ImageSubresourceLayers imageSubresource  = {};
    Offset3D               imageOffset       = {};
    Extent3D               imageExtent       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (generic native-to-wrapper type lookup).
  template <>
  struct CppType<VkCopyMemoryToImageIndirectCommandKHR>
  {
    using Type = CopyMemoryToImageIndirectCommandKHR;
  };
#endif
  // Alias so code written against the NV-suffixed name keeps compiling.
  using CopyMemoryToImageIndirectCommandNV = CopyMemoryToImageIndirectCommandKHR;

  // wrapper struct for struct VkCopyMemoryToImageIndirectInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMemoryToImageIndirectInfoKHR.html
  struct CopyMemoryToImageIndirectInfoKHR
  {
    // The native C struct this wrapper mirrors; conversions below reinterpret_cast between the two
    // types, relying on identical memory layout.
    using NativeType = VkCopyMemoryToImageIndirectInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCopyMemoryToImageIndirectInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; pNext is the last (defaulted) parameter although it is initialized first,
    // matching member declaration order.
    VULKAN_HPP_CONSTEXPR CopyMemoryToImageIndirectInfoKHR( AddressCopyFlagsKHR            srcCopyFlags_       = {},
                                                           uint32_t                       copyCount_          = {},
                                                           StridedDeviceAddressRangeKHR   copyAddressRange_   = {},
                                                           Image                          dstImage_           = {},
                                                           ImageLayout                    dstImageLayout_     = ImageLayout::eUndefined,
                                                           const ImageSubresourceLayers * pImageSubresources_ = {},
                                                           const void *                   pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcCopyFlags{ srcCopyFlags_ }
      , copyCount{ copyCount_ }
      , copyAddressRange{ copyAddressRange_ }
      , dstImage{ dstImage_ }
      , dstImageLayout{ dstImageLayout_ }
      , pImageSubresources{ pImageSubresources_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CopyMemoryToImageIndirectInfoKHR( CopyMemoryToImageIndirectInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native struct via the layout-compatible reinterpret_cast.
    CopyMemoryToImageIndirectInfoKHR( VkCopyMemoryToImageIndirectInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : CopyMemoryToImageIndirectInfoKHR( *reinterpret_cast<CopyMemoryToImageIndirectInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives copyCount and pImageSubresources from the array proxy.
    // Note: only the pointer is stored, so the proxied data must outlive this struct.
    CopyMemoryToImageIndirectInfoKHR( AddressCopyFlagsKHR                                           srcCopyFlags_,
                                      StridedDeviceAddressRangeKHR                                  copyAddressRange_,
                                      Image                                                         dstImage_,
                                      ImageLayout                                                   dstImageLayout_,
                                      ArrayProxyNoTemporaries<const ImageSubresourceLayers> const & imageSubresources_,
                                      const void *                                                  pNext_ = nullptr )
      : pNext( pNext_ )
      , srcCopyFlags( srcCopyFlags_ )
      , copyCount( static_cast<uint32_t>( imageSubresources_.size() ) )
      , copyAddressRange( copyAddressRange_ )
      , dstImage( dstImage_ )
      , dstImageLayout( dstImageLayout_ )
      , pImageSubresources( imageSubresources_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    CopyMemoryToImageIndirectInfoKHR & operator=( CopyMemoryToImageIndirectInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native struct via the layout-compatible reinterpret_cast.
    CopyMemoryToImageIndirectInfoKHR & operator=( VkCopyMemoryToImageIndirectInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CopyMemoryToImageIndirectInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent per-member setters; each returns *this so calls can be chained. sType has no setter.
    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectInfoKHR & setSrcCopyFlags( AddressCopyFlagsKHR srcCopyFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      srcCopyFlags = srcCopyFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectInfoKHR & setCopyCount( uint32_t copyCount_ ) VULKAN_HPP_NOEXCEPT
    {
      copyCount = copyCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectInfoKHR & setCopyAddressRange( StridedDeviceAddressRangeKHR const & copyAddressRange_ ) VULKAN_HPP_NOEXCEPT
    {
      copyAddressRange = copyAddressRange_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectInfoKHR & setDstImage( Image dstImage_ ) VULKAN_HPP_NOEXCEPT
    {
      dstImage = dstImage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectInfoKHR & setDstImageLayout( ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      dstImageLayout = dstImageLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectInfoKHR & setPImageSubresources( const ImageSubresourceLayers * pImageSubresources_ ) VULKAN_HPP_NOEXCEPT
    {
      pImageSubresources = pImageSubresources_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets count and pointer together from the array proxy (non-owning).
    CopyMemoryToImageIndirectInfoKHR &
      setImageSubresources( ArrayProxyNoTemporaries<const ImageSubresourceLayers> const & imageSubresources_ ) VULKAN_HPP_NOEXCEPT
    {
      copyCount          = static_cast<uint32_t>( imageSubresources_.size() );
      pImageSubresources = imageSubresources_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to (references/pointers of) the native struct, so the wrapper can be
    // passed directly to the C API.
    operator VkCopyMemoryToImageIndirectInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyMemoryToImageIndirectInfoKHR *>( this );
    }

    operator VkCopyMemoryToImageIndirectInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyMemoryToImageIndirectInfoKHR *>( this );
    }

    operator VkCopyMemoryToImageIndirectInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCopyMemoryToImageIndirectInfoKHR *>( this );
    }

    operator VkCopyMemoryToImageIndirectInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCopyMemoryToImageIndirectInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, in declaration order; used by the reflection-based
    // operator== below and for generic member-wise access.
    std::tuple<StructureType const &,
               const void * const &,
               AddressCopyFlagsKHR const &,
               uint32_t const &,
               StridedDeviceAddressRangeKHR const &,
               Image const &,
               ImageLayout const &,
               const ImageSubresourceLayers * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcCopyFlags, copyCount, copyAddressRange, dstImage, dstImageLayout, pImageSubresources );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CopyMemoryToImageIndirectInfoKHR const & ) const = default;
#else
    // Member-wise equality; pNext and pImageSubresources are compared as raw pointers, not deeply.
    bool operator==( CopyMemoryToImageIndirectInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcCopyFlags == rhs.srcCopyFlags ) && ( copyCount == rhs.copyCount ) &&
             ( copyAddressRange == rhs.copyAddressRange ) && ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) &&
             ( pImageSubresources == rhs.pImageSubresources );
#  endif
    }

    bool operator!=( CopyMemoryToImageIndirectInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                  sType              = StructureType::eCopyMemoryToImageIndirectInfoKHR;
    const void *                   pNext              = {};
    AddressCopyFlagsKHR            srcCopyFlags       = {};
    uint32_t                       copyCount          = {};
    StridedDeviceAddressRangeKHR   copyAddressRange   = {};
    Image                          dstImage           = {};
    ImageLayout                    dstImageLayout     = ImageLayout::eUndefined;
    const ImageSubresourceLayers * pImageSubresources = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (generic native-to-wrapper type lookup).
  template <>
  struct CppType<VkCopyMemoryToImageIndirectInfoKHR>
  {
    using Type = CopyMemoryToImageIndirectInfoKHR;
  };
#endif

  // Maps the StructureType enumerant to its corresponding wrapper struct type.
  template <>
  struct CppType<StructureType, StructureType::eCopyMemoryToImageIndirectInfoKHR>
  {
    using Type = CopyMemoryToImageIndirectInfoKHR;
  };

  // wrapper struct for struct VkMemoryToImageCopy, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryToImageCopy.html
  struct MemoryToImageCopy
  {
    // The native C struct this wrapper mirrors; conversions below reinterpret_cast between the two
    // types, relying on identical memory layout.
    using NativeType = VkMemoryToImageCopy;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryToImageCopy;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; pNext is the last (defaulted) parameter although it is initialized first,
    // matching member declaration order.
    VULKAN_HPP_CONSTEXPR MemoryToImageCopy( const void *           pHostPointer_      = {},
                                            uint32_t               memoryRowLength_   = {},
                                            uint32_t               memoryImageHeight_ = {},
                                            ImageSubresourceLayers imageSubresource_  = {},
                                            Offset3D               imageOffset_       = {},
                                            Extent3D               imageExtent_       = {},
                                            const void *           pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pHostPointer{ pHostPointer_ }
      , memoryRowLength{ memoryRowLength_ }
      , memoryImageHeight{ memoryImageHeight_ }
      , imageSubresource{ imageSubresource_ }
      , imageOffset{ imageOffset_ }
      , imageExtent{ imageExtent_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryToImageCopy( MemoryToImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native struct via the layout-compatible reinterpret_cast.
    MemoryToImageCopy( VkMemoryToImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryToImageCopy( *reinterpret_cast<MemoryToImageCopy const *>( &rhs ) ) {}

    MemoryToImageCopy & operator=( MemoryToImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native struct via the layout-compatible reinterpret_cast.
    MemoryToImageCopy & operator=( VkMemoryToImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryToImageCopy const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent per-member setters; each returns *this so calls can be chained. sType has no setter.
    VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setPHostPointer( const void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT
    {
      pHostPointer = pHostPointer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setMemoryRowLength( uint32_t memoryRowLength_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryRowLength = memoryRowLength_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setMemoryImageHeight( uint32_t memoryImageHeight_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryImageHeight = memoryImageHeight_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setImageSubresource( ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      imageSubresource = imageSubresource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setImageOffset( Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      imageOffset = imageOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setImageExtent( Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
    {
      imageExtent = imageExtent_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to (references/pointers of) the native struct, so the wrapper can be
    // passed directly to the C API.
    operator VkMemoryToImageCopy const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryToImageCopy *>( this );
    }

    operator VkMemoryToImageCopy &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryToImageCopy *>( this );
    }

    operator VkMemoryToImageCopy const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryToImageCopy *>( this );
    }

    operator VkMemoryToImageCopy *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryToImageCopy *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, in declaration order; used by the reflection-based
    // operator== below and for generic member-wise access.
    std::tuple<StructureType const &,
               const void * const &,
               const void * const &,
               uint32_t const &,
               uint32_t const &,
               ImageSubresourceLayers const &,
               Offset3D const &,
               Extent3D const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pHostPointer, memoryRowLength, memoryImageHeight, imageSubresource, imageOffset, imageExtent );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryToImageCopy const & ) const = default;
#else
    // Member-wise equality; pNext and pHostPointer are compared as raw pointers, not deeply.
    bool operator==( MemoryToImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pHostPointer == rhs.pHostPointer ) && ( memoryRowLength == rhs.memoryRowLength ) &&
             ( memoryImageHeight == rhs.memoryImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) &&
             ( imageExtent == rhs.imageExtent );
#  endif
    }

    bool operator!=( MemoryToImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType          sType             = StructureType::eMemoryToImageCopy;
    const void *           pNext             = {};
    const void *           pHostPointer      = {};
    uint32_t               memoryRowLength   = {};
    uint32_t               memoryImageHeight = {};
    ImageSubresourceLayers imageSubresource  = {};
    Offset3D               imageOffset       = {};
    Extent3D               imageExtent       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (generic native-to-wrapper type lookup).
  template <>
  struct CppType<VkMemoryToImageCopy>
  {
    using Type = MemoryToImageCopy;
  };
#endif

  // Maps the StructureType enumerant to its corresponding wrapper struct type.
  template <>
  struct CppType<StructureType, StructureType::eMemoryToImageCopy>
  {
    using Type = MemoryToImageCopy;
  };

  // Alias so code written against the EXT-suffixed name keeps compiling.
  using MemoryToImageCopyEXT = MemoryToImageCopy;

  // wrapper struct for struct VkCopyMemoryToImageInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMemoryToImageInfo.html
  struct CopyMemoryToImageInfo
  {
    // The native C struct this wrapper mirrors; conversions below reinterpret_cast between the two
    // types, relying on identical memory layout.
    using NativeType = VkCopyMemoryToImageInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCopyMemoryToImageInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; pNext is the last (defaulted) parameter although it is initialized first,
    // matching member declaration order.
    VULKAN_HPP_CONSTEXPR CopyMemoryToImageInfo( HostImageCopyFlags        flags_          = {},
                                                Image                     dstImage_       = {},
                                                ImageLayout               dstImageLayout_ = ImageLayout::eUndefined,
                                                uint32_t                  regionCount_    = {},
                                                const MemoryToImageCopy * pRegions_       = {},
                                                const void *              pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , dstImage{ dstImage_ }
      , dstImageLayout{ dstImageLayout_ }
      , regionCount{ regionCount_ }
      , pRegions{ pRegions_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CopyMemoryToImageInfo( CopyMemoryToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native struct via the layout-compatible reinterpret_cast.
    CopyMemoryToImageInfo( VkCopyMemoryToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : CopyMemoryToImageInfo( *reinterpret_cast<CopyMemoryToImageInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives regionCount and pRegions from the array proxy.
    // Note: only the pointer is stored, so the proxied data must outlive this struct.
    CopyMemoryToImageInfo( HostImageCopyFlags                                       flags_,
                           Image                                                    dstImage_,
                           ImageLayout                                              dstImageLayout_,
                           ArrayProxyNoTemporaries<const MemoryToImageCopy> const & regions_,
                           const void *                                             pNext_ = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , dstImage( dstImage_ )
      , dstImageLayout( dstImageLayout_ )
      , regionCount( static_cast<uint32_t>( regions_.size() ) )
      , pRegions( regions_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    CopyMemoryToImageInfo & operator=( CopyMemoryToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native struct via the layout-compatible reinterpret_cast.
    CopyMemoryToImageInfo & operator=( VkCopyMemoryToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CopyMemoryToImageInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent per-member setters; each returns *this so calls can be chained. sType has no setter.
    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setFlags( HostImageCopyFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setDstImage( Image dstImage_ ) VULKAN_HPP_NOEXCEPT
    {
      dstImage = dstImage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setDstImageLayout( ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      dstImageLayout = dstImageLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = regionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setPRegions( const MemoryToImageCopy * pRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      pRegions = pRegions_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets count and pointer together from the array proxy (non-owning).
    CopyMemoryToImageInfo & setRegions( ArrayProxyNoTemporaries<const MemoryToImageCopy> const & regions_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = static_cast<uint32_t>( regions_.size() );
      pRegions    = regions_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to (references/pointers of) the native struct, so the wrapper can be
    // passed directly to the C API.
    operator VkCopyMemoryToImageInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyMemoryToImageInfo *>( this );
    }

    operator VkCopyMemoryToImageInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyMemoryToImageInfo *>( this );
    }

    operator VkCopyMemoryToImageInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCopyMemoryToImageInfo *>( this );
    }

    operator VkCopyMemoryToImageInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCopyMemoryToImageInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, in declaration order; used by the reflection-based
    // operator== below and for generic member-wise access.
    std::tuple<StructureType const &,
               const void * const &,
               HostImageCopyFlags const &,
               Image const &,
               ImageLayout const &,
               uint32_t const &,
               const MemoryToImageCopy * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, dstImage, dstImageLayout, regionCount, pRegions );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CopyMemoryToImageInfo const & ) const = default;
#else
    // Member-wise equality; pNext and pRegions are compared as raw pointers, not deeply.
    bool operator==( CopyMemoryToImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dstImage == rhs.dstImage ) &&
             ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
#  endif
    }

    bool operator!=( CopyMemoryToImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType             sType          = StructureType::eCopyMemoryToImageInfo;
    const void *              pNext          = {};
    HostImageCopyFlags        flags          = {};
    Image                     dstImage       = {};
    ImageLayout               dstImageLayout = ImageLayout::eUndefined;
    uint32_t                  regionCount    = {};
    const MemoryToImageCopy * pRegions       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (generic native-to-wrapper type lookup).
  template <>
  struct CppType<VkCopyMemoryToImageInfo>
  {
    using Type = CopyMemoryToImageInfo;
  };
#endif

  // Maps the StructureType enumerant to its corresponding wrapper struct type.
  template <>
  struct CppType<StructureType, StructureType::eCopyMemoryToImageInfo>
  {
    using Type = CopyMemoryToImageInfo;
  };

  // Alias so code written against the EXT-suffixed name keeps compiling.
  using CopyMemoryToImageInfoEXT = CopyMemoryToImageInfo;

  // wrapper struct for struct VkCopyMemoryToMicromapInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMemoryToMicromapInfoEXT.html
  struct CopyMemoryToMicromapInfoEXT
  {
    // The native C struct this wrapper mirrors; conversions below reinterpret_cast between the two
    // types, relying on identical memory layout.
    using NativeType = VkCopyMemoryToMicromapInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCopyMemoryToMicromapInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; pNext is the last (defaulted) parameter although it is initialized first,
    // matching member declaration order. Note this one is CONSTEXPR_14 (unlike the CONSTEXPR
    // constructors of the neighboring structs), presumably because of the src member's type —
    // confirm against the generator.
    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT( DeviceOrHostAddressConstKHR src_   = {},
                                                         MicromapEXT                 dst_   = {},
                                                         CopyMicromapModeEXT         mode_  = CopyMicromapModeEXT::eClone,
                                                         const void *                pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , src{ src_ }
      , dst{ dst_ }
      , mode{ mode_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT( CopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native struct via the layout-compatible reinterpret_cast.
    CopyMemoryToMicromapInfoEXT( VkCopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : CopyMemoryToMicromapInfoEXT( *reinterpret_cast<CopyMemoryToMicromapInfoEXT const *>( &rhs ) )
    {
    }

    CopyMemoryToMicromapInfoEXT & operator=( CopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native struct via the layout-compatible reinterpret_cast.
    CopyMemoryToMicromapInfoEXT & operator=( VkCopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CopyMemoryToMicromapInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent per-member setters; each returns *this so calls can be chained. sType has no setter.
    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setSrc( DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT
    {
      src = src_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setDst( MicromapEXT dst_ ) VULKAN_HPP_NOEXCEPT
    {
      dst = dst_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setMode( CopyMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT
    {
      mode = mode_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to (references/pointers of) the native struct, so the wrapper can be
    // passed directly to the C API.
    operator VkCopyMemoryToMicromapInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( this );
    }

    operator VkCopyMemoryToMicromapInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyMemoryToMicromapInfoEXT *>( this );
    }

    operator VkCopyMemoryToMicromapInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( this );
    }

    operator VkCopyMemoryToMicromapInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCopyMemoryToMicromapInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, in declaration order, for generic member-wise access.
    // NOTE(review): no operator==/<=> is generated for this struct — presumably because
    // DeviceOrHostAddressConstKHR has no meaningful equality; confirm against the generator.
    std::tuple<StructureType const &, const void * const &, DeviceOrHostAddressConstKHR const &, MicromapEXT const &, CopyMicromapModeEXT const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, src, dst, mode );
    }
#endif

  public:
    StructureType               sType = StructureType::eCopyMemoryToMicromapInfoEXT;
    const void *                pNext = {};
    DeviceOrHostAddressConstKHR src   = {};
    MicromapEXT                 dst   = {};
    CopyMicromapModeEXT         mode  = CopyMicromapModeEXT::eClone;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (generic native-to-wrapper type lookup).
  template <>
  struct CppType<VkCopyMemoryToMicromapInfoEXT>
  {
    using Type = CopyMemoryToMicromapInfoEXT;
  };
#endif

  // Maps the StructureType enumerant to its corresponding wrapper struct type.
  template <>
  struct CppType<StructureType, StructureType::eCopyMemoryToMicromapInfoEXT>
  {
    using Type = CopyMemoryToMicromapInfoEXT;
  };

  // wrapper struct for struct VkCopyMicromapInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMicromapInfoEXT.html
  // Describes a micromap-to-micromap copy (src -> dst, with a copy mode).
  struct CopyMicromapInfoEXT
  {
    using NativeType = VkCopyMicromapInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCopyMicromapInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Memberwise constructor; sType is fixed by its member initializer and is not a parameter.
    VULKAN_HPP_CONSTEXPR CopyMicromapInfoEXT( MicromapEXT         src_   = {},
                                              MicromapEXT         dst_   = {},
                                              CopyMicromapModeEXT mode_  = CopyMicromapModeEXT::eClone,
                                              const void *        pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , src{ src_ }
      , dst{ dst_ }
      , mode{ mode_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CopyMicromapInfoEXT( CopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility with VkCopyMicromapInfoEXT.
    CopyMicromapInfoEXT( VkCopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : CopyMicromapInfoEXT( *reinterpret_cast<CopyMicromapInfoEXT const *>( &rhs ) )
    {
    }

    CopyMicromapInfoEXT & operator=( CopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility assumption as the converting constructor).
    CopyMicromapInfoEXT & operator=( VkCopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CopyMicromapInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setSrc( MicromapEXT src_ ) VULKAN_HPP_NOEXCEPT
    {
      src = src_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setDst( MicromapEXT dst_ ) VULKAN_HPP_NOEXCEPT
    {
      dst = dst_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setMode( CopyMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT
    {
      mode = mode_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const).
    operator VkCopyMicromapInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyMicromapInfoEXT *>( this );
    }

    operator VkCopyMicromapInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyMicromapInfoEXT *>( this );
    }

    operator VkCopyMicromapInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCopyMicromapInfoEXT *>( this );
    }

    operator VkCopyMicromapInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCopyMicromapInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, MicromapEXT const &, MicromapEXT const &, CopyMicromapModeEXT const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, src, dst, mode );
    }
#endif

    // Memberwise comparison; note pNext is compared as a raw pointer, not deeply.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CopyMicromapInfoEXT const & ) const = default;
#else
    bool operator==( CopyMicromapInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( src == rhs.src ) && ( dst == rhs.dst ) && ( mode == rhs.mode );
#  endif
    }

    bool operator!=( CopyMicromapInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType       sType = StructureType::eCopyMicromapInfoEXT;
    const void *        pNext = {};
    MicromapEXT         src   = {};
    MicromapEXT         dst   = {};
    CopyMicromapModeEXT mode  = CopyMicromapModeEXT::eClone;
  };

  // Map the C type (and the StructureType enumerant) back to the C++ wrapper.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkCopyMicromapInfoEXT>
  {
    using Type = CopyMicromapInfoEXT;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eCopyMicromapInfoEXT>
  {
    using Type = CopyMicromapInfoEXT;
  };

  // wrapper struct for struct VkCopyMicromapToMemoryInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMicromapToMemoryInfoEXT.html
  // Describes serializing a micromap into caller-provided memory (device or host address).
  struct CopyMicromapToMemoryInfoEXT
  {
    using NativeType = VkCopyMicromapToMemoryInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCopyMicromapToMemoryInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Memberwise constructor. NOTE(review): CONSTEXPR_14 rather than CONSTEXPR,
    // presumably because `dst` is a union wrapper — confirm against the generator.
    VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT( MicromapEXT            src_   = {},
                                                         DeviceOrHostAddressKHR dst_   = {},
                                                         CopyMicromapModeEXT    mode_  = CopyMicromapModeEXT::eClone,
                                                         const void *           pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , src{ src_ }
      , dst{ dst_ }
      , mode{ mode_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT( CopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility with VkCopyMicromapToMemoryInfoEXT.
    CopyMicromapToMemoryInfoEXT( VkCopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : CopyMicromapToMemoryInfoEXT( *reinterpret_cast<CopyMicromapToMemoryInfoEXT const *>( &rhs ) )
    {
    }

    CopyMicromapToMemoryInfoEXT & operator=( CopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility assumption as the converting constructor).
    CopyMicromapToMemoryInfoEXT & operator=( VkCopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CopyMicromapToMemoryInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setSrc( MicromapEXT src_ ) VULKAN_HPP_NOEXCEPT
    {
      src = src_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setDst( DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT
    {
      dst = dst_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setMode( CopyMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT
    {
      mode = mode_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const).
    operator VkCopyMicromapToMemoryInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( this );
    }

    operator VkCopyMicromapToMemoryInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyMicromapToMemoryInfoEXT *>( this );
    }

    operator VkCopyMicromapToMemoryInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( this );
    }

    operator VkCopyMicromapToMemoryInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCopyMicromapToMemoryInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, MicromapEXT const &, DeviceOrHostAddressKHR const &, CopyMicromapModeEXT const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, src, dst, mode );
    }
#endif

    // NOTE(review): no comparison operators here, presumably because `dst` holds a
    // union type (DeviceOrHostAddressKHR) that cannot be compared memberwise.
  public:
    StructureType          sType = StructureType::eCopyMicromapToMemoryInfoEXT;
    const void *           pNext = {};
    MicromapEXT            src   = {};
    DeviceOrHostAddressKHR dst   = {};
    CopyMicromapModeEXT    mode  = CopyMicromapModeEXT::eClone;
  };

  // Map the C type (and the StructureType enumerant) back to the C++ wrapper.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkCopyMicromapToMemoryInfoEXT>
  {
    using Type = CopyMicromapToMemoryInfoEXT;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eCopyMicromapToMemoryInfoEXT>
  {
    using Type = CopyMicromapToMemoryInfoEXT;
  };

  // wrapper struct for struct VkTensorCopyARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorCopyARM.html
  // One region of a tensor copy: per-dimension source offset, destination offset,
  // and extent arrays, each with `dimensionCount` elements. The struct only stores
  // raw pointers — the caller must keep the arrays alive while the struct is in use.
  struct TensorCopyARM
  {
    using NativeType = VkTensorCopyARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eTensorCopyARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Memberwise constructor; sType is fixed by its member initializer and is not a parameter.
    VULKAN_HPP_CONSTEXPR TensorCopyARM( uint32_t         dimensionCount_ = {},
                                        const uint64_t * pSrcOffset_     = {},
                                        const uint64_t * pDstOffset_     = {},
                                        const uint64_t * pExtent_        = {},
                                        const void *     pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , dimensionCount{ dimensionCount_ }
      , pSrcOffset{ pSrcOffset_ }
      , pDstOffset{ pDstOffset_ }
      , pExtent{ pExtent_ }
    {
    }

    VULKAN_HPP_CONSTEXPR TensorCopyARM( TensorCopyARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility with VkTensorCopyARM.
    TensorCopyARM( VkTensorCopyARM const & rhs ) VULKAN_HPP_NOEXCEPT : TensorCopyARM( *reinterpret_cast<TensorCopyARM const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced constructor: derives dimensionCount from srcOffset_.size() and
    // validates that all non-empty proxies have matching sizes (asserts when
    // exceptions are disabled, throws LogicError otherwise).
    TensorCopyARM( ArrayProxyNoTemporaries<const uint64_t> const & srcOffset_,
                   ArrayProxyNoTemporaries<const uint64_t> const & dstOffset_ = {},
                   ArrayProxyNoTemporaries<const uint64_t> const & extent_    = {},
                   const void *                                    pNext_     = nullptr )
      : pNext( pNext_ )
      , dimensionCount( static_cast<uint32_t>( srcOffset_.size() ) )
      , pSrcOffset( srcOffset_.data() )
      , pDstOffset( dstOffset_.data() )
      , pExtent( extent_.data() )
    {
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( srcOffset_.empty() || dstOffset_.empty() || ( srcOffset_.size() == dstOffset_.size() ) );
      VULKAN_HPP_ASSERT( srcOffset_.empty() || extent_.empty() || ( srcOffset_.size() == extent_.size() ) );
      VULKAN_HPP_ASSERT( dstOffset_.empty() || extent_.empty() || ( dstOffset_.size() == extent_.size() ) );
#    else
      if ( !srcOffset_.empty() && !dstOffset_.empty() && ( srcOffset_.size() != dstOffset_.size() ) )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                          "::TensorCopyARM::TensorCopyARM: !srcOffset_.empty() && !dstOffset_.empty() && ( srcOffset_.size() != dstOffset_.size() )" );
      }
      if ( !srcOffset_.empty() && !extent_.empty() && ( srcOffset_.size() != extent_.size() ) )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                          "::TensorCopyARM::TensorCopyARM: !srcOffset_.empty() && !extent_.empty() && ( srcOffset_.size() != extent_.size() )" );
      }
      if ( !dstOffset_.empty() && !extent_.empty() && ( dstOffset_.size() != extent_.size() ) )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                          "::TensorCopyARM::TensorCopyARM: !dstOffset_.empty() && !extent_.empty() && ( dstOffset_.size() != extent_.size() )" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    TensorCopyARM & operator=( TensorCopyARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility assumption as the converting constructor).
    TensorCopyARM & operator=( VkTensorCopyARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<TensorCopyARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 TensorCopyARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorCopyARM & setDimensionCount( uint32_t dimensionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      dimensionCount = dimensionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorCopyARM & setPSrcOffset( const uint64_t * pSrcOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      pSrcOffset = pSrcOffset_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Proxy setter: updates both dimensionCount and pSrcOffset from one array.
    TensorCopyARM & setSrcOffset( ArrayProxyNoTemporaries<const uint64_t> const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      dimensionCount = static_cast<uint32_t>( srcOffset_.size() );
      pSrcOffset     = srcOffset_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 TensorCopyARM & setPDstOffset( const uint64_t * pDstOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      pDstOffset = pDstOffset_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Proxy setter: updates both dimensionCount and pDstOffset from one array.
    TensorCopyARM & setDstOffset( ArrayProxyNoTemporaries<const uint64_t> const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      dimensionCount = static_cast<uint32_t>( dstOffset_.size() );
      pDstOffset     = dstOffset_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 TensorCopyARM & setPExtent( const uint64_t * pExtent_ ) VULKAN_HPP_NOEXCEPT
    {
      pExtent = pExtent_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Proxy setter: updates both dimensionCount and pExtent from one array.
    TensorCopyARM & setExtent( ArrayProxyNoTemporaries<const uint64_t> const & extent_ ) VULKAN_HPP_NOEXCEPT
    {
      dimensionCount = static_cast<uint32_t>( extent_.size() );
      pExtent        = extent_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const).
    operator VkTensorCopyARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkTensorCopyARM *>( this );
    }

    operator VkTensorCopyARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkTensorCopyARM *>( this );
    }

    operator VkTensorCopyARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkTensorCopyARM *>( this );
    }

    operator VkTensorCopyARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkTensorCopyARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const uint64_t * const &, const uint64_t * const &, const uint64_t * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, dimensionCount, pSrcOffset, pDstOffset, pExtent );
    }
#endif

    // Memberwise comparison; the array members compare by pointer identity,
    // not by the pointed-to contents.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( TensorCopyARM const & ) const = default;
#else
    bool operator==( TensorCopyARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dimensionCount == rhs.dimensionCount ) && ( pSrcOffset == rhs.pSrcOffset ) &&
             ( pDstOffset == rhs.pDstOffset ) && ( pExtent == rhs.pExtent );
#  endif
    }

    bool operator!=( TensorCopyARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType    sType          = StructureType::eTensorCopyARM;
    const void *     pNext          = {};
    uint32_t         dimensionCount = {};
    const uint64_t * pSrcOffset     = {};
    const uint64_t * pDstOffset     = {};
    const uint64_t * pExtent        = {};
  };

  // Map the C type (and the StructureType enumerant) back to the C++ wrapper.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkTensorCopyARM>
  {
    using Type = TensorCopyARM;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eTensorCopyARM>
  {
    using Type = TensorCopyARM;
  };

  // wrapper struct for struct VkCopyTensorInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyTensorInfoARM.html
  // Describes a tensor-to-tensor copy: source/destination tensors plus an array of
  // TensorCopyARM regions. Only the pointer to the regions is stored; the caller
  // must keep the region array alive while the struct is in use.
  struct CopyTensorInfoARM
  {
    using NativeType = VkCopyTensorInfoARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCopyTensorInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Memberwise constructor; sType is fixed by its member initializer and is not a parameter.
    VULKAN_HPP_CONSTEXPR CopyTensorInfoARM( TensorARM             srcTensor_   = {},
                                            TensorARM             dstTensor_   = {},
                                            uint32_t              regionCount_ = {},
                                            const TensorCopyARM * pRegions_    = {},
                                            const void *          pNext_       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcTensor{ srcTensor_ }
      , dstTensor{ dstTensor_ }
      , regionCount{ regionCount_ }
      , pRegions{ pRegions_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CopyTensorInfoARM( CopyTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility with VkCopyTensorInfoARM.
    CopyTensorInfoARM( VkCopyTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT : CopyTensorInfoARM( *reinterpret_cast<CopyTensorInfoARM const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced constructor: derives regionCount and pRegions from an array proxy.
    CopyTensorInfoARM( TensorARM                                            srcTensor_,
                       TensorARM                                            dstTensor_,
                       ArrayProxyNoTemporaries<const TensorCopyARM> const & regions_,
                       const void *                                         pNext_ = nullptr )
      : pNext( pNext_ ), srcTensor( srcTensor_ ), dstTensor( dstTensor_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    CopyTensorInfoARM & operator=( CopyTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility assumption as the converting constructor).
    CopyTensorInfoARM & operator=( VkCopyTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CopyTensorInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 CopyTensorInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyTensorInfoARM & setSrcTensor( TensorARM srcTensor_ ) VULKAN_HPP_NOEXCEPT
    {
      srcTensor = srcTensor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyTensorInfoARM & setDstTensor( TensorARM dstTensor_ ) VULKAN_HPP_NOEXCEPT
    {
      dstTensor = dstTensor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyTensorInfoARM & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = regionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CopyTensorInfoARM & setPRegions( const TensorCopyARM * pRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      pRegions = pRegions_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Proxy setter: updates both regionCount and pRegions from one array.
    CopyTensorInfoARM & setRegions( ArrayProxyNoTemporaries<const TensorCopyARM> const & regions_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = static_cast<uint32_t>( regions_.size() );
      pRegions    = regions_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const).
    operator VkCopyTensorInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyTensorInfoARM *>( this );
    }

    operator VkCopyTensorInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyTensorInfoARM *>( this );
    }

    operator VkCopyTensorInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCopyTensorInfoARM *>( this );
    }

    operator VkCopyTensorInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCopyTensorInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, TensorARM const &, TensorARM const &, uint32_t const &, const TensorCopyARM * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcTensor, dstTensor, regionCount, pRegions );
    }
#endif

    // Memberwise comparison; pRegions compares by pointer identity, not region contents.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CopyTensorInfoARM const & ) const = default;
#else
    bool operator==( CopyTensorInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcTensor == rhs.srcTensor ) && ( dstTensor == rhs.dstTensor ) &&
             ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
#  endif
    }

    bool operator!=( CopyTensorInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType         sType       = StructureType::eCopyTensorInfoARM;
    const void *          pNext       = {};
    TensorARM             srcTensor   = {};
    TensorARM             dstTensor   = {};
    uint32_t              regionCount = {};
    const TensorCopyARM * pRegions    = {};
  };

  // Map the C type (and the StructureType enumerant) back to the C++ wrapper.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkCopyTensorInfoARM>
  {
    using Type = CopyTensorInfoARM;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eCopyTensorInfoARM>
  {
    using Type = CopyTensorInfoARM;
  };

  // wrapper struct for struct VkCuFunctionCreateInfoNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCuFunctionCreateInfoNVX.html
  // Parameters for creating a CUDA function from a module: the module handle and the
  // (NUL-terminated) entry point name. pName is a borrowed pointer; the caller must
  // keep the string alive while the struct is in use.
  struct CuFunctionCreateInfoNVX
  {
    using NativeType = VkCuFunctionCreateInfoNVX;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCuFunctionCreateInfoNVX;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Memberwise constructor; pName defaults to nullptr, so comparisons must tolerate null names.
    VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX( CuModuleNVX module_ = {}, const char * pName_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , module{ module_ }
      , pName{ pName_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility with VkCuFunctionCreateInfoNVX.
    CuFunctionCreateInfoNVX( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
      : CuFunctionCreateInfoNVX( *reinterpret_cast<CuFunctionCreateInfoNVX const *>( &rhs ) )
    {
    }

    CuFunctionCreateInfoNVX & operator=( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility assumption as the converting constructor).
    CuFunctionCreateInfoNVX & operator=( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CuFunctionCreateInfoNVX const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setModule( CuModuleNVX module_ ) VULKAN_HPP_NOEXCEPT
    {
      module = module_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT
    {
      pName = pName_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const).
    operator VkCuFunctionCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( this );
    }

    operator VkCuFunctionCreateInfoNVX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCuFunctionCreateInfoNVX *>( this );
    }

    operator VkCuFunctionCreateInfoNVX const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( this );
    }

    operator VkCuFunctionCreateInfoNVX *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCuFunctionCreateInfoNVX *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, CuModuleNVX const &, const char * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, module, pName );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written ordering: pName is compared by string content, other members memberwise.
    std::strong_ordering operator<=>( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = module <=> rhs.module; cmp != 0 )
        return cmp;
      if ( pName != rhs.pName )
      {
        // Fix: pName defaults to nullptr, and strcmp on a null pointer is undefined
        // behavior. Order a null name before any non-null name, and only call strcmp
        // when both sides are non-null.
        if ( !pName || !rhs.pName )
          return pName ? std::strong_ordering::greater : std::strong_ordering::less;
        if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 )
          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      }

      return std::strong_ordering::equivalent;
    }
#endif

    bool operator==( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      // Names are equal when both pointers are identical (covers both-null), or when
      // both are non-null and the strings match; strcmp is never called with a null
      // pointer (which would be undefined behavior).
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( module == rhs.module ) &&
             ( ( pName == rhs.pName ) || ( pName && rhs.pName && ( strcmp( pName, rhs.pName ) == 0 ) ) );
    }

    bool operator!=( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    StructureType sType  = StructureType::eCuFunctionCreateInfoNVX;
    const void *  pNext  = {};
    CuModuleNVX   module = {};
    const char *  pName  = {};
  };

  // Map the C type (and the StructureType enumerant) back to the C++ wrapper.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkCuFunctionCreateInfoNVX>
  {
    using Type = CuFunctionCreateInfoNVX;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eCuFunctionCreateInfoNVX>
  {
    using Type = CuFunctionCreateInfoNVX;
  };

  // wrapper struct for struct VkCuLaunchInfoNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCuLaunchInfoNVX.html
  struct CuLaunchInfoNVX
  {
    using NativeType = VkCuLaunchInfoNVX;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCuLaunchInfoNVX;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX( CuFunctionNVX        function_       = {},
                                          uint32_t             gridDimX_       = {},
                                          uint32_t             gridDimY_       = {},
                                          uint32_t             gridDimZ_       = {},
                                          uint32_t             blockDimX_      = {},
                                          uint32_t             blockDimY_      = {},
                                          uint32_t             blockDimZ_      = {},
                                          uint32_t             sharedMemBytes_ = {},
                                          size_t               paramCount_     = {},
                                          const void * const * pParams_        = {},
                                          size_t               extraCount_     = {},
                                          const void * const * pExtras_        = {},
                                          const void *         pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , function{ function_ }
      , gridDimX{ gridDimX_ }
      , gridDimY{ gridDimY_ }
      , gridDimZ{ gridDimZ_ }
      , blockDimX{ blockDimX_ }
      , blockDimY{ blockDimY_ }
      , blockDimZ{ blockDimZ_ }
      , sharedMemBytes{ sharedMemBytes_ }
      , paramCount{ paramCount_ }
      , pParams{ pParams_ }
      , extraCount{ extraCount_ }
      , pExtras{ pExtras_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CuLaunchInfoNVX( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT : CuLaunchInfoNVX( *reinterpret_cast<CuLaunchInfoNVX const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    CuLaunchInfoNVX( CuFunctionNVX                                       function_,
                     uint32_t                                            gridDimX_,
                     uint32_t                                            gridDimY_,
                     uint32_t                                            gridDimZ_,
                     uint32_t                                            blockDimX_,
                     uint32_t                                            blockDimY_,
                     uint32_t                                            blockDimZ_,
                     uint32_t                                            sharedMemBytes_,
                     ArrayProxyNoTemporaries<const void * const> const & params_,
                     ArrayProxyNoTemporaries<const void * const> const & extras_ = {},
                     const void *                                        pNext_  = nullptr )
      : pNext( pNext_ )
      , function( function_ )
      , gridDimX( gridDimX_ )
      , gridDimY( gridDimY_ )
      , gridDimZ( gridDimZ_ )
      , blockDimX( blockDimX_ )
      , blockDimY( blockDimY_ )
      , blockDimZ( blockDimZ_ )
      , sharedMemBytes( sharedMemBytes_ )
      , paramCount( params_.size() )
      , pParams( params_.data() )
      , extraCount( extras_.size() )
      , pExtras( extras_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    CuLaunchInfoNVX & operator=( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    CuLaunchInfoNVX & operator=( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CuLaunchInfoNVX const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setFunction( CuFunctionNVX function_ ) VULKAN_HPP_NOEXCEPT
    {
      function = function_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimX( uint32_t gridDimX_ ) VULKAN_HPP_NOEXCEPT
    {
      gridDimX = gridDimX_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimY( uint32_t gridDimY_ ) VULKAN_HPP_NOEXCEPT
    {
      gridDimY = gridDimY_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimZ( uint32_t gridDimZ_ ) VULKAN_HPP_NOEXCEPT
    {
      gridDimZ = gridDimZ_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimX( uint32_t blockDimX_ ) VULKAN_HPP_NOEXCEPT
    {
      blockDimX = blockDimX_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimY( uint32_t blockDimY_ ) VULKAN_HPP_NOEXCEPT
    {
      blockDimY = blockDimY_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimZ( uint32_t blockDimZ_ ) VULKAN_HPP_NOEXCEPT
    {
      blockDimZ = blockDimZ_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setSharedMemBytes( uint32_t sharedMemBytes_ ) VULKAN_HPP_NOEXCEPT
    {
      sharedMemBytes = sharedMemBytes_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setParamCount( size_t paramCount_ ) VULKAN_HPP_NOEXCEPT
    {
      paramCount = paramCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPParams( const void * const * pParams_ ) VULKAN_HPP_NOEXCEPT
    {
      pParams = pParams_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    CuLaunchInfoNVX & setParams( ArrayProxyNoTemporaries<const void * const> const & params_ ) VULKAN_HPP_NOEXCEPT
    {
      paramCount = params_.size();
      pParams    = params_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setExtraCount( size_t extraCount_ ) VULKAN_HPP_NOEXCEPT
    {
      extraCount = extraCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPExtras( const void * const * pExtras_ ) VULKAN_HPP_NOEXCEPT
    {
      pExtras = pExtras_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience setter: fills both extraCount and pExtras from one array proxy.
    // The proxy's storage is not owned and must outlive this struct's use.
    CuLaunchInfoNVX & setExtras( ArrayProxyNoTemporaries<const void * const> const & extras_ ) VULKAN_HPP_NOEXCEPT
    {
      extraCount = extras_.size();
      pExtras    = extras_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (by reference and by pointer).
    // These rely on the wrapper being generated layout-compatible with
    // VkCuLaunchInfoNVX, so the reinterpret_cast views the same bytes.
    operator VkCuLaunchInfoNVX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCuLaunchInfoNVX *>( this );
    }

    operator VkCuLaunchInfoNVX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCuLaunchInfoNVX *>( this );
    }

    operator VkCuLaunchInfoNVX const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCuLaunchInfoNVX *>( this );
    }

    operator VkCuLaunchInfoNVX *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCuLaunchInfoNVX *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns const references to every member, in declaration order, for generic
    // member-wise operations (e.g. the reflection-based operator== below).
    std::tuple<StructureType const &,
               const void * const &,
               CuFunctionNVX const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               size_t const &,
               const void * const * const &,
               size_t const &,
               const void * const * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(
        sType, pNext, function, gridDimX, gridDimY, gridDimZ, blockDimX, blockDimY, blockDimZ, sharedMemBytes, paramCount, pParams, extraCount, pExtras );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Member-wise defaulted three-way comparison (implies == and !=).
    auto operator<=>( CuLaunchInfoNVX const & ) const = default;
#else
    // Member-wise equality; note pParams and pExtras are compared by pointer
    // value, not by the contents they point to.
    bool operator==( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( function == rhs.function ) && ( gridDimX == rhs.gridDimX ) && ( gridDimY == rhs.gridDimY ) &&
             ( gridDimZ == rhs.gridDimZ ) && ( blockDimX == rhs.blockDimX ) && ( blockDimY == rhs.blockDimY ) && ( blockDimZ == rhs.blockDimZ ) &&
             ( sharedMemBytes == rhs.sharedMemBytes ) && ( paramCount == rhs.paramCount ) && ( pParams == rhs.pParams ) && ( extraCount == rhs.extraCount ) &&
             ( pExtras == rhs.pExtras );
#  endif
    }

    bool operator!=( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkCuLaunchInfoNVX in order and type, keeping the wrapper
    // layout-compatible with the C struct. Pointer members are non-owning.
    StructureType        sType          = StructureType::eCuLaunchInfoNVX;
    const void *         pNext          = {};
    CuFunctionNVX        function       = {};
    uint32_t             gridDimX       = {};
    uint32_t             gridDimY       = {};
    uint32_t             gridDimZ       = {};
    uint32_t             blockDimX      = {};
    uint32_t             blockDimY      = {};
    uint32_t             blockDimZ      = {};
    uint32_t             sharedMemBytes = {};
    size_t               paramCount     = {};
    const void * const * pParams        = {};
    size_t               extraCount     = {};
    const void * const * pExtras        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for template helpers.
  template <>
  struct CppType<VkCuLaunchInfoNVX>
  {
    using Type = CuLaunchInfoNVX;
  };
#endif

  // Maps StructureType::eCuLaunchInfoNVX to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eCuLaunchInfoNVX>
  {
    using Type = CuLaunchInfoNVX;
  };

  // wrapper struct for struct VkCuModuleCreateInfoNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCuModuleCreateInfoNVX.html
  struct CuModuleCreateInfoNVX
  {
    using NativeType = VkCuModuleCreateInfoNVX;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCuModuleCreateInfoNVX;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // pData points at the raw CUDA module blob of dataSize bytes; not owned.
    VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( size_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , dataSize{ dataSize_ }
      , pData{ pData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on layout compatibility.
    CuModuleCreateInfoNVX( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
      : CuModuleCreateInfoNVX( *reinterpret_cast<CuModuleCreateInfoNVX const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives dataSize in bytes from a typed array proxy.
    template <typename T>
    CuModuleCreateInfoNVX( ArrayProxyNoTemporaries<const T> const & data_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), dataSize( data_.size() * sizeof( T ) ), pData( data_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    CuModuleCreateInfoNVX & operator=( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; relies on layout compatibility.
    CuModuleCreateInfoNVX & operator=( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CuModuleCreateInfoNVX const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
    {
      dataSize = dataSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
    {
      pData = pData_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience setter: fills dataSize (in bytes) and pData from one array proxy.
    template <typename T>
    CuModuleCreateInfoNVX & setData( ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
    {
      dataSize = data_.size() * sizeof( T );
      pData    = data_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (reference and pointer forms).
    operator VkCuModuleCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCuModuleCreateInfoNVX *>( this );
    }

    operator VkCuModuleCreateInfoNVX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCuModuleCreateInfoNVX *>( this );
    }

    operator VkCuModuleCreateInfoNVX const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCuModuleCreateInfoNVX *>( this );
    }

    operator VkCuModuleCreateInfoNVX *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCuModuleCreateInfoNVX *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Const references to all members in declaration order, for generic code.
    std::tuple<StructureType const &, const void * const &, size_t const &, const void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, dataSize, pData );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CuModuleCreateInfoNVX const & ) const = default;
#else
    // Member-wise equality; pData is compared by pointer, not by pointed-to bytes.
    bool operator==( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData );
#  endif
    }

    bool operator!=( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkCuModuleCreateInfoNVX in order and type.
    StructureType sType    = StructureType::eCuModuleCreateInfoNVX;
    const void *  pNext    = {};
    size_t        dataSize = {};
    const void *  pData    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for template helpers.
  template <>
  struct CppType<VkCuModuleCreateInfoNVX>
  {
    using Type = CuModuleCreateInfoNVX;
  };
#endif

  // Maps StructureType::eCuModuleCreateInfoNVX to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eCuModuleCreateInfoNVX>
  {
    using Type = CuModuleCreateInfoNVX;
  };

  // wrapper struct for struct VkCuModuleTexturingModeCreateInfoNVX, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCuModuleTexturingModeCreateInfoNVX.html
  struct CuModuleTexturingModeCreateInfoNVX
  {
    using NativeType = VkCuModuleTexturingModeCreateInfoNVX;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCuModuleTexturingModeCreateInfoNVX;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR CuModuleTexturingModeCreateInfoNVX( Bool32 use64bitTexturing_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , use64bitTexturing{ use64bitTexturing_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CuModuleTexturingModeCreateInfoNVX( CuModuleTexturingModeCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on layout compatibility.
    CuModuleTexturingModeCreateInfoNVX( VkCuModuleTexturingModeCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
      : CuModuleTexturingModeCreateInfoNVX( *reinterpret_cast<CuModuleTexturingModeCreateInfoNVX const *>( &rhs ) )
    {
    }

    CuModuleTexturingModeCreateInfoNVX & operator=( CuModuleTexturingModeCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; relies on layout compatibility.
    CuModuleTexturingModeCreateInfoNVX & operator=( VkCuModuleTexturingModeCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CuModuleTexturingModeCreateInfoNVX const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 CuModuleTexturingModeCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CuModuleTexturingModeCreateInfoNVX & setUse64bitTexturing( Bool32 use64bitTexturing_ ) VULKAN_HPP_NOEXCEPT
    {
      use64bitTexturing = use64bitTexturing_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (reference and pointer forms).
    operator VkCuModuleTexturingModeCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCuModuleTexturingModeCreateInfoNVX *>( this );
    }

    operator VkCuModuleTexturingModeCreateInfoNVX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCuModuleTexturingModeCreateInfoNVX *>( this );
    }

    operator VkCuModuleTexturingModeCreateInfoNVX const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCuModuleTexturingModeCreateInfoNVX *>( this );
    }

    operator VkCuModuleTexturingModeCreateInfoNVX *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCuModuleTexturingModeCreateInfoNVX *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Const references to all members in declaration order, for generic code.
    std::tuple<StructureType const &, const void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, use64bitTexturing );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CuModuleTexturingModeCreateInfoNVX const & ) const = default;
#else
    // Member-wise equality comparison.
    bool operator==( CuModuleTexturingModeCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( use64bitTexturing == rhs.use64bitTexturing );
#  endif
    }

    bool operator!=( CuModuleTexturingModeCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkCuModuleTexturingModeCreateInfoNVX in order and type.
    StructureType sType             = StructureType::eCuModuleTexturingModeCreateInfoNVX;
    const void *  pNext             = {};
    Bool32        use64bitTexturing = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for template helpers.
  template <>
  struct CppType<VkCuModuleTexturingModeCreateInfoNVX>
  {
    using Type = CuModuleTexturingModeCreateInfoNVX;
  };
#endif

  // Maps StructureType::eCuModuleTexturingModeCreateInfoNVX to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eCuModuleTexturingModeCreateInfoNVX>
  {
    using Type = CuModuleTexturingModeCreateInfoNVX;
  };

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  // wrapper struct for struct VkCudaFunctionCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCudaFunctionCreateInfoNV.html
  struct CudaFunctionCreateInfoNV
  {
    using NativeType = VkCudaFunctionCreateInfoNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCudaFunctionCreateInfoNV;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // pName is a non-owned C string naming the function in module_; defaults to nullptr.
    VULKAN_HPP_CONSTEXPR CudaFunctionCreateInfoNV( CudaModuleNV module_ = {}, const char * pName_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , module{ module_ }
      , pName{ pName_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CudaFunctionCreateInfoNV( CudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on layout compatibility.
    CudaFunctionCreateInfoNV( VkCudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : CudaFunctionCreateInfoNV( *reinterpret_cast<CudaFunctionCreateInfoNV const *>( &rhs ) )
    {
    }

    CudaFunctionCreateInfoNV & operator=( CudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; relies on layout compatibility.
    CudaFunctionCreateInfoNV & operator=( VkCudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CudaFunctionCreateInfoNV const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 CudaFunctionCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CudaFunctionCreateInfoNV & setModule( CudaModuleNV module_ ) VULKAN_HPP_NOEXCEPT
    {
      module = module_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CudaFunctionCreateInfoNV & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT
    {
      pName = pName_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (reference and pointer forms).
    operator VkCudaFunctionCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( this );
    }

    operator VkCudaFunctionCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCudaFunctionCreateInfoNV *>( this );
    }

    operator VkCudaFunctionCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( this );
    }

    operator VkCudaFunctionCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCudaFunctionCreateInfoNV *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Const references to all members in declaration order, for generic code.
    std::tuple<StructureType const &, const void * const &, CudaModuleNV const &, const char * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, module, pName );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Three-way comparison with deep (content) comparison of pName.
    std::strong_ordering operator<=>( CudaFunctionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = module <=> rhs.module; cmp != 0 )
        return cmp;
      if ( pName != rhs.pName )
      {
        // strcmp on a null pointer is undefined behavior, and pName defaults to
        // nullptr; order a null name before any non-null name instead.
        if ( !pName || !rhs.pName )
          return pName ? std::strong_ordering::greater : std::strong_ordering::less;
        if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 )
          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      }

      return std::strong_ordering::equivalent;
    }
#  endif

    // Member-wise equality with null-safe deep comparison of pName
    // (calling strcmp with a null pointer would be undefined behavior).
    bool operator==( CudaFunctionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( module == rhs.module ) &&
             ( ( pName == rhs.pName ) || ( pName && rhs.pName && ( strcmp( pName, rhs.pName ) == 0 ) ) );
    }

    bool operator!=( CudaFunctionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Members mirror VkCudaFunctionCreateInfoNV in order and type; pName is non-owning.
    StructureType sType  = StructureType::eCudaFunctionCreateInfoNV;
    const void *  pNext  = {};
    CudaModuleNV  module = {};
    const char *  pName  = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for template helpers.
  template <>
  struct CppType<VkCudaFunctionCreateInfoNV>
  {
    using Type = CudaFunctionCreateInfoNV;
  };
#  endif

  // Maps StructureType::eCudaFunctionCreateInfoNV to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eCudaFunctionCreateInfoNV>
  {
    using Type = CudaFunctionCreateInfoNV;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  // wrapper struct for struct VkCudaLaunchInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCudaLaunchInfoNV.html
  struct CudaLaunchInfoNV
  {
    using NativeType = VkCudaLaunchInfoNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCudaLaunchInfoNV;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Pointer members (pParams, pExtras) are non-owning; callers keep the arrays alive.
    VULKAN_HPP_CONSTEXPR CudaLaunchInfoNV( CudaFunctionNV       function_       = {},
                                           uint32_t             gridDimX_       = {},
                                           uint32_t             gridDimY_       = {},
                                           uint32_t             gridDimZ_       = {},
                                           uint32_t             blockDimX_      = {},
                                           uint32_t             blockDimY_      = {},
                                           uint32_t             blockDimZ_      = {},
                                           uint32_t             sharedMemBytes_ = {},
                                           size_t               paramCount_     = {},
                                           const void * const * pParams_        = {},
                                           size_t               extraCount_     = {},
                                           const void * const * pExtras_        = {},
                                           const void *         pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , function{ function_ }
      , gridDimX{ gridDimX_ }
      , gridDimY{ gridDimY_ }
      , gridDimZ{ gridDimZ_ }
      , blockDimX{ blockDimX_ }
      , blockDimY{ blockDimY_ }
      , blockDimZ{ blockDimZ_ }
      , sharedMemBytes{ sharedMemBytes_ }
      , paramCount{ paramCount_ }
      , pParams{ pParams_ }
      , extraCount{ extraCount_ }
      , pExtras{ pExtras_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CudaLaunchInfoNV( CudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on layout compatibility.
    CudaLaunchInfoNV( VkCudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : CudaLaunchInfoNV( *reinterpret_cast<CudaLaunchInfoNV const *>( &rhs ) ) {}

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives paramCount/pParams and extraCount/pExtras
    // from array proxies instead of separate count/pointer pairs.
    CudaLaunchInfoNV( CudaFunctionNV                                      function_,
                      uint32_t                                            gridDimX_,
                      uint32_t                                            gridDimY_,
                      uint32_t                                            gridDimZ_,
                      uint32_t                                            blockDimX_,
                      uint32_t                                            blockDimY_,
                      uint32_t                                            blockDimZ_,
                      uint32_t                                            sharedMemBytes_,
                      ArrayProxyNoTemporaries<const void * const> const & params_,
                      ArrayProxyNoTemporaries<const void * const> const & extras_ = {},
                      const void *                                        pNext_  = nullptr )
      : pNext( pNext_ )
      , function( function_ )
      , gridDimX( gridDimX_ )
      , gridDimY( gridDimY_ )
      , gridDimZ( gridDimZ_ )
      , blockDimX( blockDimX_ )
      , blockDimY( blockDimY_ )
      , blockDimZ( blockDimZ_ )
      , sharedMemBytes( sharedMemBytes_ )
      , paramCount( params_.size() )
      , pParams( params_.data() )
      , extraCount( extras_.size() )
      , pExtras( extras_.data() )
    {
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    CudaLaunchInfoNV & operator=( CudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; relies on layout compatibility.
    CudaLaunchInfoNV & operator=( VkCudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CudaLaunchInfoNV const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setFunction( CudaFunctionNV function_ ) VULKAN_HPP_NOEXCEPT
    {
      function = function_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setGridDimX( uint32_t gridDimX_ ) VULKAN_HPP_NOEXCEPT
    {
      gridDimX = gridDimX_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setGridDimY( uint32_t gridDimY_ ) VULKAN_HPP_NOEXCEPT
    {
      gridDimY = gridDimY_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setGridDimZ( uint32_t gridDimZ_ ) VULKAN_HPP_NOEXCEPT
    {
      gridDimZ = gridDimZ_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setBlockDimX( uint32_t blockDimX_ ) VULKAN_HPP_NOEXCEPT
    {
      blockDimX = blockDimX_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setBlockDimY( uint32_t blockDimY_ ) VULKAN_HPP_NOEXCEPT
    {
      blockDimY = blockDimY_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setBlockDimZ( uint32_t blockDimZ_ ) VULKAN_HPP_NOEXCEPT
    {
      blockDimZ = blockDimZ_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setSharedMemBytes( uint32_t sharedMemBytes_ ) VULKAN_HPP_NOEXCEPT
    {
      sharedMemBytes = sharedMemBytes_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setParamCount( size_t paramCount_ ) VULKAN_HPP_NOEXCEPT
    {
      paramCount = paramCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setPParams( const void * const * pParams_ ) VULKAN_HPP_NOEXCEPT
    {
      pParams = pParams_;
      return *this;
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience setter: fills paramCount and pParams from one array proxy.
    CudaLaunchInfoNV & setParams( ArrayProxyNoTemporaries<const void * const> const & params_ ) VULKAN_HPP_NOEXCEPT
    {
      paramCount = params_.size();
      pParams    = params_.data();
      return *this;
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setExtraCount( size_t extraCount_ ) VULKAN_HPP_NOEXCEPT
    {
      extraCount = extraCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setPExtras( const void * const * pExtras_ ) VULKAN_HPP_NOEXCEPT
    {
      pExtras = pExtras_;
      return *this;
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience setter: fills extraCount and pExtras from one array proxy.
    CudaLaunchInfoNV & setExtras( ArrayProxyNoTemporaries<const void * const> const & extras_ ) VULKAN_HPP_NOEXCEPT
    {
      extraCount = extras_.size();
      pExtras    = extras_.data();
      return *this;
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#  endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (reference and pointer forms).
    operator VkCudaLaunchInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCudaLaunchInfoNV *>( this );
    }

    operator VkCudaLaunchInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCudaLaunchInfoNV *>( this );
    }

    operator VkCudaLaunchInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCudaLaunchInfoNV *>( this );
    }

    operator VkCudaLaunchInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCudaLaunchInfoNV *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Const references to all members in declaration order, for generic code.
    std::tuple<StructureType const &,
               const void * const &,
               CudaFunctionNV const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               size_t const &,
               const void * const * const &,
               size_t const &,
               const void * const * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(
        sType, pNext, function, gridDimX, gridDimY, gridDimZ, blockDimX, blockDimY, blockDimZ, sharedMemBytes, paramCount, pParams, extraCount, pExtras );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CudaLaunchInfoNV const & ) const = default;
#  else
    // Member-wise equality; pParams and pExtras are compared by pointer value.
    bool operator==( CudaLaunchInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( function == rhs.function ) && ( gridDimX == rhs.gridDimX ) && ( gridDimY == rhs.gridDimY ) &&
             ( gridDimZ == rhs.gridDimZ ) && ( blockDimX == rhs.blockDimX ) && ( blockDimY == rhs.blockDimY ) && ( blockDimZ == rhs.blockDimZ ) &&
             ( sharedMemBytes == rhs.sharedMemBytes ) && ( paramCount == rhs.paramCount ) && ( pParams == rhs.pParams ) && ( extraCount == rhs.extraCount ) &&
             ( pExtras == rhs.pExtras );
#    endif
    }

    bool operator!=( CudaLaunchInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror VkCudaLaunchInfoNV in order and type; pointers are non-owning.
    StructureType        sType          = StructureType::eCudaLaunchInfoNV;
    const void *         pNext          = {};
    CudaFunctionNV       function       = {};
    uint32_t             gridDimX       = {};
    uint32_t             gridDimY       = {};
    uint32_t             gridDimZ       = {};
    uint32_t             blockDimX      = {};
    uint32_t             blockDimY      = {};
    uint32_t             blockDimZ      = {};
    uint32_t             sharedMemBytes = {};
    size_t               paramCount     = {};
    const void * const * pParams        = {};
    size_t               extraCount     = {};
    const void * const * pExtras        = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for template helpers.
  template <>
  struct CppType<VkCudaLaunchInfoNV>
  {
    using Type = CudaLaunchInfoNV;
  };
#  endif

  // Maps StructureType::eCudaLaunchInfoNV to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eCudaLaunchInfoNV>
  {
    using Type = CudaLaunchInfoNV;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  // wrapper struct for struct VkCudaModuleCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCudaModuleCreateInfoNV.html
  struct CudaModuleCreateInfoNV
  {
    using NativeType = VkCudaModuleCreateInfoNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCudaModuleCreateInfoNV;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // pData points at the raw CUDA module blob of dataSize bytes; not owned.
    VULKAN_HPP_CONSTEXPR CudaModuleCreateInfoNV( size_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , dataSize{ dataSize_ }
      , pData{ pData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CudaModuleCreateInfoNV( CudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on layout compatibility.
    CudaModuleCreateInfoNV( VkCudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : CudaModuleCreateInfoNV( *reinterpret_cast<CudaModuleCreateInfoNV const *>( &rhs ) )
    {
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives dataSize in bytes from a typed array proxy.
    template <typename T>
    CudaModuleCreateInfoNV( ArrayProxyNoTemporaries<const T> const & data_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), dataSize( data_.size() * sizeof( T ) ), pData( data_.data() )
    {
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    CudaModuleCreateInfoNV & operator=( CudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; relies on layout compatibility.
    CudaModuleCreateInfoNV & operator=( VkCudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CudaModuleCreateInfoNV const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 CudaModuleCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CudaModuleCreateInfoNV & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
    {
      dataSize = dataSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CudaModuleCreateInfoNV & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
    {
      pData = pData_;
      return *this;
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience setter: fills dataSize (in bytes) and pData from one array proxy.
    template <typename T>
    CudaModuleCreateInfoNV & setData( ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
    {
      dataSize = data_.size() * sizeof( T );
      pData    = data_.data();
      return *this;
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#  endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (reference and pointer forms).
    operator VkCudaModuleCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCudaModuleCreateInfoNV *>( this );
    }

    operator VkCudaModuleCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCudaModuleCreateInfoNV *>( this );
    }

    operator VkCudaModuleCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCudaModuleCreateInfoNV *>( this );
    }

    operator VkCudaModuleCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCudaModuleCreateInfoNV *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Const references to all members in declaration order, for generic code.
    std::tuple<StructureType const &, const void * const &, size_t const &, const void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, dataSize, pData );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CudaModuleCreateInfoNV const & ) const = default;
#  else
    // Member-wise equality; pData is compared by pointer, not by pointed-to bytes.
    bool operator==( CudaModuleCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData );
#    endif
    }

    bool operator!=( CudaModuleCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror VkCudaModuleCreateInfoNV in order and type.
    StructureType sType    = StructureType::eCudaModuleCreateInfoNV;
    const void *  pNext    = {};
    size_t        dataSize = {};
    const void *  pData    = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for template helpers.
  template <>
  struct CppType<VkCudaModuleCreateInfoNV>
  {
    using Type = CudaModuleCreateInfoNV;
  };
#  endif

  // Maps StructureType::eCudaModuleCreateInfoNV to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eCudaModuleCreateInfoNV>
  {
    using Type = CudaModuleCreateInfoNV;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  // wrapper struct for struct VkCustomResolveCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCustomResolveCreateInfoEXT.html
  struct CustomResolveCreateInfoEXT
  {
    using NativeType = VkCustomResolveCreateInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eCustomResolveCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all fields are value-initialized by default.
    VULKAN_HPP_CONSTEXPR CustomResolveCreateInfoEXT( Bool32         customResolve_           = {},
                                                     uint32_t       colorAttachmentCount_    = {},
                                                     const Format * pColorAttachmentFormats_ = {},
                                                     Format         depthAttachmentFormat_   = Format::eUndefined,
                                                     Format         stencilAttachmentFormat_ = Format::eUndefined,
                                                     const void *   pNext_                   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , customResolve{ customResolve_ }
      , colorAttachmentCount{ colorAttachmentCount_ }
      , pColorAttachmentFormats{ pColorAttachmentFormats_ }
      , depthAttachmentFormat{ depthAttachmentFormat_ }
      , stencilAttachmentFormat{ stencilAttachmentFormat_ }
    {
    }

    VULKAN_HPP_CONSTEXPR CustomResolveCreateInfoEXT( CustomResolveCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on the wrapper having identical memory layout.
    CustomResolveCreateInfoEXT( VkCustomResolveCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : CustomResolveCreateInfoEXT( *reinterpret_cast<CustomResolveCreateInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: colorAttachmentCount is derived from the array proxy's size.
    CustomResolveCreateInfoEXT( Bool32                                        customResolve_,
                                ArrayProxyNoTemporaries<const Format> const & colorAttachmentFormats_,
                                Format                                        depthAttachmentFormat_   = Format::eUndefined,
                                Format                                        stencilAttachmentFormat_ = Format::eUndefined,
                                const void *                                  pNext_                   = nullptr )
      : pNext( pNext_ )
      , customResolve( customResolve_ )
      , colorAttachmentCount( static_cast<uint32_t>( colorAttachmentFormats_.size() ) )
      , pColorAttachmentFormats( colorAttachmentFormats_.data() )
      , depthAttachmentFormat( depthAttachmentFormat_ )
      , stencilAttachmentFormat( stencilAttachmentFormat_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    CustomResolveCreateInfoEXT & operator=( CustomResolveCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct; relies on identical memory layout.
    CustomResolveCreateInfoEXT & operator=( VkCustomResolveCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<CustomResolveCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 CustomResolveCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CustomResolveCreateInfoEXT & setCustomResolve( Bool32 customResolve_ ) VULKAN_HPP_NOEXCEPT
    {
      customResolve = customResolve_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CustomResolveCreateInfoEXT & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount = colorAttachmentCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CustomResolveCreateInfoEXT & setPColorAttachmentFormats( const Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
    {
      pColorAttachmentFormats = pColorAttachmentFormats_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both the formats pointer and the count from a single array proxy.
    CustomResolveCreateInfoEXT & setColorAttachmentFormats( ArrayProxyNoTemporaries<const Format> const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount    = static_cast<uint32_t>( colorAttachmentFormats_.size() );
      pColorAttachmentFormats = colorAttachmentFormats_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 CustomResolveCreateInfoEXT & setDepthAttachmentFormat( Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      depthAttachmentFormat = depthAttachmentFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CustomResolveCreateInfoEXT & setStencilAttachmentFormat( Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilAttachmentFormat = stencilAttachmentFormat_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (by reference and by pointer); layout-compatible by construction.
    operator VkCustomResolveCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCustomResolveCreateInfoEXT *>( this );
    }

    operator VkCustomResolveCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCustomResolveCreateInfoEXT *>( this );
    }

    operator VkCustomResolveCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkCustomResolveCreateInfoEXT *>( this );
    }

    operator VkCustomResolveCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkCustomResolveCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references over all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, Bool32 const &, uint32_t const &, const Format * const &, Format const &, Format const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, customResolve, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CustomResolveCreateInfoEXT const & ) const = default;
#else
    // Shallow, member-wise comparison: pointer members compare by address, not by pointee.
    bool operator==( CustomResolveCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( customResolve == rhs.customResolve ) &&
             ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) &&
             ( depthAttachmentFormat == rhs.depthAttachmentFormat ) && ( stencilAttachmentFormat == rhs.stencilAttachmentFormat );
#  endif
    }

    bool operator!=( CustomResolveCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType  sType                   = StructureType::eCustomResolveCreateInfoEXT;
    const void *   pNext                   = {};
    Bool32         customResolve           = {};
    uint32_t       colorAttachmentCount    = {};
    const Format * pColorAttachmentFormats = {};
    Format         depthAttachmentFormat   = Format::eUndefined;
    Format         stencilAttachmentFormat = Format::eUndefined;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkCustomResolveCreateInfoEXT type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkCustomResolveCreateInfoEXT>
  {
    using Type = CustomResolveCreateInfoEXT;
  };
#endif

  // Maps StructureType::eCustomResolveCreateInfoEXT back to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eCustomResolveCreateInfoEXT>
  {
    using Type = CustomResolveCreateInfoEXT;
  };

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  // wrapper struct for struct VkD3D12FenceSubmitInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkD3D12FenceSubmitInfoKHR.html
  struct D3D12FenceSubmitInfoKHR
  {
    using NativeType = VkD3D12FenceSubmitInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eD3D12FenceSubmitInfoKHR;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all fields are value-initialized by default.
    VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( uint32_t         waitSemaphoreValuesCount_   = {},
                                                  const uint64_t * pWaitSemaphoreValues_       = {},
                                                  uint32_t         signalSemaphoreValuesCount_ = {},
                                                  const uint64_t * pSignalSemaphoreValues_     = {},
                                                  const void *     pNext_                      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , waitSemaphoreValuesCount{ waitSemaphoreValuesCount_ }
      , pWaitSemaphoreValues{ pWaitSemaphoreValues_ }
      , signalSemaphoreValuesCount{ signalSemaphoreValuesCount_ }
      , pSignalSemaphoreValues{ pSignalSemaphoreValues_ }
    {
    }

    VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on the wrapper having identical memory layout.
    D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : D3D12FenceSubmitInfoKHR( *reinterpret_cast<D3D12FenceSubmitInfoKHR const *>( &rhs ) )
    {
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: the two counts are derived from the array proxies' sizes.
    D3D12FenceSubmitInfoKHR( ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_,
                             ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {},
                             const void *                                    pNext_                 = nullptr )
      : pNext( pNext_ )
      , waitSemaphoreValuesCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) )
      , pWaitSemaphoreValues( waitSemaphoreValues_.data() )
      , signalSemaphoreValuesCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) )
      , pSignalSemaphoreValues( signalSemaphoreValues_.data() )
    {
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    D3D12FenceSubmitInfoKHR & operator=( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct; relies on identical memory layout.
    D3D12FenceSubmitInfoKHR & operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<D3D12FenceSubmitInfoKHR const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreValuesCount = waitSemaphoreValuesCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
    {
      pWaitSemaphoreValues = pWaitSemaphoreValues_;
      return *this;
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both the wait-values pointer and count from a single array proxy.
    D3D12FenceSubmitInfoKHR & setWaitSemaphoreValues( ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreValuesCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
      pWaitSemaphoreValues     = waitSemaphoreValues_.data();
      return *this;
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
    {
      signalSemaphoreValuesCount = signalSemaphoreValuesCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
    {
      pSignalSemaphoreValues = pSignalSemaphoreValues_;
      return *this;
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both the signal-values pointer and count from a single array proxy.
    D3D12FenceSubmitInfoKHR & setSignalSemaphoreValues( ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
    {
      signalSemaphoreValuesCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
      pSignalSemaphoreValues     = signalSemaphoreValues_.data();
      return *this;
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#  endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (by reference and by pointer); layout-compatible by construction.
    operator VkD3D12FenceSubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkD3D12FenceSubmitInfoKHR *>( this );
    }

    operator VkD3D12FenceSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkD3D12FenceSubmitInfoKHR *>( this );
    }

    operator VkD3D12FenceSubmitInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkD3D12FenceSubmitInfoKHR *>( this );
    }

    operator VkD3D12FenceSubmitInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkD3D12FenceSubmitInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references over all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const uint64_t * const &, uint32_t const &, const uint64_t * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, waitSemaphoreValuesCount, pWaitSemaphoreValues, signalSemaphoreValuesCount, pSignalSemaphoreValues );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( D3D12FenceSubmitInfoKHR const & ) const = default;
#  else
    // Shallow, member-wise comparison: pointer members compare by address, not by pointee.
    bool operator==( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount ) &&
             ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) && ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount ) &&
             ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
#    endif
    }

    bool operator!=( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    StructureType    sType                      = StructureType::eD3D12FenceSubmitInfoKHR;
    const void *     pNext                      = {};
    uint32_t         waitSemaphoreValuesCount   = {};
    const uint64_t * pWaitSemaphoreValues       = {};
    uint32_t         signalSemaphoreValuesCount = {};
    const uint64_t * pSignalSemaphoreValues     = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkD3D12FenceSubmitInfoKHR type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkD3D12FenceSubmitInfoKHR>
  {
    using Type = D3D12FenceSubmitInfoKHR;
  };
#  endif

  // Maps StructureType::eD3D12FenceSubmitInfoKHR back to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eD3D12FenceSubmitInfoKHR>
  {
    using Type = D3D12FenceSubmitInfoKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

  // wrapper struct for struct VkPhysicalDeviceDataGraphOperationSupportARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDataGraphOperationSupportARM.html
  struct PhysicalDeviceDataGraphOperationSupportARM
  {
    using NativeType = VkPhysicalDeviceDataGraphOperationSupportARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; name is a fixed-size char array copied from the given std::array.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphOperationSupportARM(
      PhysicalDeviceDataGraphOperationTypeARM operationType_ = PhysicalDeviceDataGraphOperationTypeARM::eSpirvExtendedInstructionSet,
      std::array<char, VK_MAX_PHYSICAL_DEVICE_DATA_GRAPH_OPERATION_SET_NAME_SIZE_ARM> const & name_    = {},
      uint32_t                                                                                version_ = {} ) VULKAN_HPP_NOEXCEPT
      : operationType{ operationType_ }
      , name{ name_ }
      , version{ version_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphOperationSupportARM( PhysicalDeviceDataGraphOperationSupportARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on the wrapper having identical memory layout.
    PhysicalDeviceDataGraphOperationSupportARM( VkPhysicalDeviceDataGraphOperationSupportARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDataGraphOperationSupportARM( *reinterpret_cast<PhysicalDeviceDataGraphOperationSupportARM const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor taking a std::string name.
    // NOTE(review): the assert requires name_.size() < capacity; in release builds an oversized
    // name_ would be truncated by strncpy without a terminating NUL, making the later strcmp
    // calls read past the array — callers must respect the size limit.
    PhysicalDeviceDataGraphOperationSupportARM( PhysicalDeviceDataGraphOperationTypeARM operationType_, std::string const & name_, uint32_t version_ = {} )
      : operationType( operationType_ ), version( version_ )
    {
      VULKAN_HPP_ASSERT( name_.size() < VK_MAX_PHYSICAL_DEVICE_DATA_GRAPH_OPERATION_SET_NAME_SIZE_ARM );
#    if defined( _WIN32 )
      strncpy_s( name, VK_MAX_PHYSICAL_DEVICE_DATA_GRAPH_OPERATION_SET_NAME_SIZE_ARM, name_.data(), name_.size() );
#    else
      strncpy( name, name_.data(), std::min<size_t>( VK_MAX_PHYSICAL_DEVICE_DATA_GRAPH_OPERATION_SET_NAME_SIZE_ARM, name_.size() ) );
#    endif
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PhysicalDeviceDataGraphOperationSupportARM & operator=( PhysicalDeviceDataGraphOperationSupportARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct; relies on identical memory layout.
    PhysicalDeviceDataGraphOperationSupportARM & operator=( VkPhysicalDeviceDataGraphOperationSupportARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDataGraphOperationSupportARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per member.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphOperationSupportARM &
      setOperationType( PhysicalDeviceDataGraphOperationTypeARM operationType_ ) VULKAN_HPP_NOEXCEPT
    {
      operationType = operationType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphOperationSupportARM &
      setName( std::array<char, VK_MAX_PHYSICAL_DEVICE_DATA_GRAPH_OPERATION_SET_NAME_SIZE_ARM> name_ ) VULKAN_HPP_NOEXCEPT
    {
      name = name_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // std::string overload of setName; same truncation caveat as the std::string constructor above.
    PhysicalDeviceDataGraphOperationSupportARM & setName( std::string const & name_ ) VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( name_.size() < VK_MAX_PHYSICAL_DEVICE_DATA_GRAPH_OPERATION_SET_NAME_SIZE_ARM );
#    if defined( _WIN32 )
      strncpy_s( name, VK_MAX_PHYSICAL_DEVICE_DATA_GRAPH_OPERATION_SET_NAME_SIZE_ARM, name_.data(), name_.size() );
#    else
      strncpy( name, name_.data(), std::min<size_t>( VK_MAX_PHYSICAL_DEVICE_DATA_GRAPH_OPERATION_SET_NAME_SIZE_ARM, name_.size() ) );
#    endif
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphOperationSupportARM & setVersion( uint32_t version_ ) VULKAN_HPP_NOEXCEPT
    {
      version = version_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (by reference and by pointer); layout-compatible by construction.
    operator VkPhysicalDeviceDataGraphOperationSupportARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDataGraphOperationSupportARM *>( this );
    }

    operator VkPhysicalDeviceDataGraphOperationSupportARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDataGraphOperationSupportARM *>( this );
    }

    operator VkPhysicalDeviceDataGraphOperationSupportARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDataGraphOperationSupportARM *>( this );
    }

    operator VkPhysicalDeviceDataGraphOperationSupportARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDataGraphOperationSupportARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references over all members, in declaration order.
    std::tuple<PhysicalDeviceDataGraphOperationTypeARM const &,
               ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_DATA_GRAPH_OPERATION_SET_NAME_SIZE_ARM> const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( operationType, name, version );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written three-way comparison: the name member is compared by string content
    // (strcmp), not element-wise, so a defaulted operator<=> cannot be used here.
    std::strong_ordering operator<=>( PhysicalDeviceDataGraphOperationSupportARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = operationType <=> rhs.operationType; cmp != 0 )
        return cmp;
      if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = version <=> rhs.version; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    // Equality compares the name by string content, other members by value.
    bool operator==( PhysicalDeviceDataGraphOperationSupportARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( operationType == rhs.operationType ) && ( strcmp( name, rhs.name ) == 0 ) && ( version == rhs.version );
    }

    bool operator!=( PhysicalDeviceDataGraphOperationSupportARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    PhysicalDeviceDataGraphOperationTypeARM operationType = PhysicalDeviceDataGraphOperationTypeARM::eSpirvExtendedInstructionSet;
    ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_DATA_GRAPH_OPERATION_SET_NAME_SIZE_ARM> name    = {};
    uint32_t                                                                            version = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkPhysicalDeviceDataGraphOperationSupportARM type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceDataGraphOperationSupportARM>
  {
    using Type = PhysicalDeviceDataGraphOperationSupportARM;
  };
#endif

  // wrapper struct for struct VkDataGraphPipelineBuiltinModelCreateInfoQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineBuiltinModelCreateInfoQCOM.html
  struct DataGraphPipelineBuiltinModelCreateInfoQCOM
  {
    using NativeType = VkDataGraphPipelineBuiltinModelCreateInfoQCOM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDataGraphPipelineBuiltinModelCreateInfoQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pOperation points to the operation this builtin model implements.
    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineBuiltinModelCreateInfoQCOM( const PhysicalDeviceDataGraphOperationSupportARM * pOperation_ = {},
                                                                         const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pOperation{ pOperation_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14
      DataGraphPipelineBuiltinModelCreateInfoQCOM( DataGraphPipelineBuiltinModelCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on the wrapper having identical memory layout.
    DataGraphPipelineBuiltinModelCreateInfoQCOM( VkDataGraphPipelineBuiltinModelCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : DataGraphPipelineBuiltinModelCreateInfoQCOM( *reinterpret_cast<DataGraphPipelineBuiltinModelCreateInfoQCOM const *>( &rhs ) )
    {
    }

    DataGraphPipelineBuiltinModelCreateInfoQCOM & operator=( DataGraphPipelineBuiltinModelCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct; relies on identical memory layout.
    DataGraphPipelineBuiltinModelCreateInfoQCOM & operator=( VkDataGraphPipelineBuiltinModelCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DataGraphPipelineBuiltinModelCreateInfoQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineBuiltinModelCreateInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineBuiltinModelCreateInfoQCOM &
      setPOperation( const PhysicalDeviceDataGraphOperationSupportARM * pOperation_ ) VULKAN_HPP_NOEXCEPT
    {
      pOperation = pOperation_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (by reference and by pointer); layout-compatible by construction.
    operator VkDataGraphPipelineBuiltinModelCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDataGraphPipelineBuiltinModelCreateInfoQCOM *>( this );
    }

    operator VkDataGraphPipelineBuiltinModelCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDataGraphPipelineBuiltinModelCreateInfoQCOM *>( this );
    }

    operator VkDataGraphPipelineBuiltinModelCreateInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDataGraphPipelineBuiltinModelCreateInfoQCOM *>( this );
    }

    operator VkDataGraphPipelineBuiltinModelCreateInfoQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDataGraphPipelineBuiltinModelCreateInfoQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references over all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, const PhysicalDeviceDataGraphOperationSupportARM * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pOperation );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DataGraphPipelineBuiltinModelCreateInfoQCOM const & ) const = default;
#else
    // Shallow, member-wise comparison: pOperation compares by address, not by pointee.
    bool operator==( DataGraphPipelineBuiltinModelCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pOperation == rhs.pOperation );
#  endif
    }

    bool operator!=( DataGraphPipelineBuiltinModelCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                                      sType      = StructureType::eDataGraphPipelineBuiltinModelCreateInfoQCOM;
    const void *                                       pNext      = {};
    const PhysicalDeviceDataGraphOperationSupportARM * pOperation = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkDataGraphPipelineBuiltinModelCreateInfoQCOM type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkDataGraphPipelineBuiltinModelCreateInfoQCOM>
  {
    using Type = DataGraphPipelineBuiltinModelCreateInfoQCOM;
  };
#endif

  // Maps StructureType::eDataGraphPipelineBuiltinModelCreateInfoQCOM back to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDataGraphPipelineBuiltinModelCreateInfoQCOM>
  {
    using Type = DataGraphPipelineBuiltinModelCreateInfoQCOM;
  };

  // wrapper struct for struct VkDataGraphPipelineCompilerControlCreateInfoARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineCompilerControlCreateInfoARM.html
  struct DataGraphPipelineCompilerControlCreateInfoARM
  {
    using NativeType = VkDataGraphPipelineCompilerControlCreateInfoARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDataGraphPipelineCompilerControlCreateInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pVendorOptions is an optional (possibly null) null-terminated string.
    VULKAN_HPP_CONSTEXPR DataGraphPipelineCompilerControlCreateInfoARM( const char * pVendorOptions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pVendorOptions{ pVendorOptions_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      DataGraphPipelineCompilerControlCreateInfoARM( DataGraphPipelineCompilerControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on the wrapper having identical memory layout.
    DataGraphPipelineCompilerControlCreateInfoARM( VkDataGraphPipelineCompilerControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : DataGraphPipelineCompilerControlCreateInfoARM( *reinterpret_cast<DataGraphPipelineCompilerControlCreateInfoARM const *>( &rhs ) )
    {
    }

    DataGraphPipelineCompilerControlCreateInfoARM & operator=( DataGraphPipelineCompilerControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct; relies on identical memory layout.
    DataGraphPipelineCompilerControlCreateInfoARM & operator=( VkDataGraphPipelineCompilerControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DataGraphPipelineCompilerControlCreateInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineCompilerControlCreateInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineCompilerControlCreateInfoARM & setPVendorOptions( const char * pVendorOptions_ ) VULKAN_HPP_NOEXCEPT
    {
      pVendorOptions = pVendorOptions_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (by reference and by pointer); layout-compatible by construction.
    operator VkDataGraphPipelineCompilerControlCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDataGraphPipelineCompilerControlCreateInfoARM *>( this );
    }

    operator VkDataGraphPipelineCompilerControlCreateInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDataGraphPipelineCompilerControlCreateInfoARM *>( this );
    }

    operator VkDataGraphPipelineCompilerControlCreateInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDataGraphPipelineCompilerControlCreateInfoARM *>( this );
    }

    operator VkDataGraphPipelineCompilerControlCreateInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDataGraphPipelineCompilerControlCreateInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references over all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, const char * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pVendorOptions );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Three-way comparison; pVendorOptions is compared by string content.
    // strcmp has undefined behavior for null arguments, so it is only invoked when both
    // pointers are non-null; a null pVendorOptions orders before any non-null string.
    std::strong_ordering operator<=>( DataGraphPipelineCompilerControlCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( pVendorOptions != rhs.pVendorOptions )
      {
        if ( !pVendorOptions || !rhs.pVendorOptions )
          return pVendorOptions ? std::strong_ordering::greater : std::strong_ordering::less;
        if ( auto cmp = strcmp( pVendorOptions, rhs.pVendorOptions ); cmp != 0 )
          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      }

      return std::strong_ordering::equivalent;
    }
#endif

    // Equality; pVendorOptions members are equal when they are the same pointer (including
    // both null) or when both are non-null with identical contents. strcmp is never called
    // with a null pointer (that would be undefined behavior).
    bool operator==( DataGraphPipelineCompilerControlCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( ( pVendorOptions == rhs.pVendorOptions ) ||
               ( pVendorOptions && rhs.pVendorOptions && ( strcmp( pVendorOptions, rhs.pVendorOptions ) == 0 ) ) );
    }

    bool operator!=( DataGraphPipelineCompilerControlCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    StructureType sType          = StructureType::eDataGraphPipelineCompilerControlCreateInfoARM;
    const void *  pNext          = {};
    const char *  pVendorOptions = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkDataGraphPipelineCompilerControlCreateInfoARM type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkDataGraphPipelineCompilerControlCreateInfoARM>
  {
    using Type = DataGraphPipelineCompilerControlCreateInfoARM;
  };
#endif

  // Maps StructureType::eDataGraphPipelineCompilerControlCreateInfoARM back to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDataGraphPipelineCompilerControlCreateInfoARM>
  {
    using Type = DataGraphPipelineCompilerControlCreateInfoARM;
  };

  // wrapper struct for struct VkDataGraphPipelineConstantARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineConstantARM.html
  struct DataGraphPipelineConstantARM
  {
    // the corresponding C API struct; this wrapper is layout-compatible with it, which makes the
    // reinterpret_cast-based conversions below valid
    using NativeType = VkDataGraphPipelineConstantARM;

    // whether this structure may appear more than once in a structure chain
    static const bool                                  allowDuplicate = false;
    // the StructureType value that sType is initialized to for this struct
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDataGraphPipelineConstantARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; all arguments are defaulted, so this also acts as the default constructor
    VULKAN_HPP_CONSTEXPR DataGraphPipelineConstantARM( uint32_t id_ = {}, const void * pConstantData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , id{ id_ }
      , pConstantData{ pConstantData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DataGraphPipelineConstantARM( DataGraphPipelineConstantARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the equivalent C API struct (copies member-wise via the layout-compatible wrapper)
    DataGraphPipelineConstantARM( VkDataGraphPipelineConstantARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : DataGraphPipelineConstantARM( *reinterpret_cast<DataGraphPipelineConstantARM const *>( &rhs ) )
    {
    }

    DataGraphPipelineConstantARM & operator=( DataGraphPipelineConstantARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the equivalent C API struct
    DataGraphPipelineConstantARM & operator=( VkDataGraphPipelineConstantARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DataGraphPipelineConstantARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineConstantARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineConstantARM & setId( uint32_t id_ ) VULKAN_HPP_NOEXCEPT
    {
      id = id_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineConstantARM & setPConstantData( const void * pConstantData_ ) VULKAN_HPP_NOEXCEPT
    {
      pConstantData = pConstantData_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the C API type, by reference and by pointer
    operator VkDataGraphPipelineConstantARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDataGraphPipelineConstantARM *>( this );
    }

    operator VkDataGraphPipelineConstantARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDataGraphPipelineConstantARM *>( this );
    }

    operator VkDataGraphPipelineConstantARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDataGraphPipelineConstantARM *>( this );
    }

    operator VkDataGraphPipelineConstantARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDataGraphPipelineConstantARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // returns a tuple of references to all members, used for member-wise comparison below
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, id, pConstantData );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DataGraphPipelineConstantARM const & ) const = default;
#else
    // member-wise equality; note that pointer members are compared by address, not by pointee
    bool operator==( DataGraphPipelineConstantARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( id == rhs.id ) && ( pConstantData == rhs.pConstantData );
#  endif
    }

    bool operator!=( DataGraphPipelineConstantARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType         = StructureType::eDataGraphPipelineConstantARM;
    const void *  pNext         = {};
    uint32_t      id            = {};
    const void *  pConstantData = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type; only provided for C++20 and newer builds.
  template <>
  struct CppType<VkDataGraphPipelineConstantARM>
  {
    using Type = DataGraphPipelineConstantARM;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type, used by generic structure-chain code.
  template <>
  struct CppType<StructureType, StructureType::eDataGraphPipelineConstantARM>
  {
    using Type = DataGraphPipelineConstantARM;
  };

  // wrapper struct for struct VkDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM.html
  struct DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM
  {
    // the corresponding C API struct; this wrapper is layout-compatible with it, which makes the
    // reinterpret_cast-based conversions below valid
    using NativeType = VkDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM;

    // this structure may appear more than once in a structure chain
    static const bool                                  allowDuplicate = true;
    // the StructureType value that sType is initialized to for this struct
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; all arguments are defaulted, so this also acts as the default constructor
    VULKAN_HPP_CONSTEXPR DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM( uint32_t     dimension_ = {},
                                                                                       uint32_t     zeroCount_ = {},
                                                                                       uint32_t     groupSize_ = {},
                                                                                       const void * pNext_     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , dimension{ dimension_ }
      , zeroCount{ zeroCount_ }
      , groupSize{ groupSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM(
      DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the equivalent C API struct (copies member-wise via the layout-compatible wrapper)
    DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM( VkDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM const & rhs )
      VULKAN_HPP_NOEXCEPT
      : DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM(
          *reinterpret_cast<DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM const *>( &rhs ) )
    {
    }

    DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM &
      operator=( DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the equivalent C API struct
    DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM &
      operator=( VkDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM & setDimension( uint32_t dimension_ ) VULKAN_HPP_NOEXCEPT
    {
      dimension = dimension_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM & setZeroCount( uint32_t zeroCount_ ) VULKAN_HPP_NOEXCEPT
    {
      zeroCount = zeroCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM & setGroupSize( uint32_t groupSize_ ) VULKAN_HPP_NOEXCEPT
    {
      groupSize = groupSize_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the C API type, by reference and by pointer
    operator VkDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM *>( this );
    }

    operator VkDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM *>( this );
    }

    operator VkDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM *>( this );
    }

    operator VkDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // returns a tuple of references to all members, used for member-wise comparison below
    std::tuple<StructureType const &, const void * const &, uint32_t const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, dimension, zeroCount, groupSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM const & ) const = default;
#else
    // member-wise equality; note that pNext is compared by address, not by pointee
    bool operator==( DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dimension == rhs.dimension ) && ( zeroCount == rhs.zeroCount ) &&
             ( groupSize == rhs.groupSize );
#  endif
    }

    bool operator!=( DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType     = StructureType::eDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM;
    const void *  pNext     = {};
    uint32_t      dimension = {};
    uint32_t      zeroCount = {};
    uint32_t      groupSize = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type; only provided for C++20 and newer builds.
  template <>
  struct CppType<VkDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM>
  {
    using Type = DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type, used by generic structure-chain code.
  template <>
  struct CppType<StructureType, StructureType::eDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM>
  {
    using Type = DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM;
  };

  // wrapper struct for struct VkDataGraphPipelineResourceInfoARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineResourceInfoARM.html
  struct DataGraphPipelineResourceInfoARM
  {
    // the corresponding C API struct; this wrapper is layout-compatible with it, which makes the
    // reinterpret_cast-based conversions below valid
    using NativeType = VkDataGraphPipelineResourceInfoARM;

    // whether this structure may appear more than once in a structure chain
    static const bool                                  allowDuplicate = false;
    // the StructureType value that sType is initialized to for this struct
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDataGraphPipelineResourceInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; all arguments are defaulted, so this also acts as the default constructor
    VULKAN_HPP_CONSTEXPR DataGraphPipelineResourceInfoARM( uint32_t     descriptorSet_ = {},
                                                           uint32_t     binding_       = {},
                                                           uint32_t     arrayElement_  = {},
                                                           const void * pNext_         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , descriptorSet{ descriptorSet_ }
      , binding{ binding_ }
      , arrayElement{ arrayElement_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DataGraphPipelineResourceInfoARM( DataGraphPipelineResourceInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the equivalent C API struct (copies member-wise via the layout-compatible wrapper)
    DataGraphPipelineResourceInfoARM( VkDataGraphPipelineResourceInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : DataGraphPipelineResourceInfoARM( *reinterpret_cast<DataGraphPipelineResourceInfoARM const *>( &rhs ) )
    {
    }

    DataGraphPipelineResourceInfoARM & operator=( DataGraphPipelineResourceInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the equivalent C API struct
    DataGraphPipelineResourceInfoARM & operator=( VkDataGraphPipelineResourceInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DataGraphPipelineResourceInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineResourceInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineResourceInfoARM & setDescriptorSet( uint32_t descriptorSet_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorSet = descriptorSet_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineResourceInfoARM & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
    {
      binding = binding_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineResourceInfoARM & setArrayElement( uint32_t arrayElement_ ) VULKAN_HPP_NOEXCEPT
    {
      arrayElement = arrayElement_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the C API type, by reference and by pointer
    operator VkDataGraphPipelineResourceInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDataGraphPipelineResourceInfoARM *>( this );
    }

    operator VkDataGraphPipelineResourceInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDataGraphPipelineResourceInfoARM *>( this );
    }

    operator VkDataGraphPipelineResourceInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDataGraphPipelineResourceInfoARM *>( this );
    }

    operator VkDataGraphPipelineResourceInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDataGraphPipelineResourceInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // returns a tuple of references to all members, used for member-wise comparison below
    std::tuple<StructureType const &, const void * const &, uint32_t const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, descriptorSet, binding, arrayElement );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DataGraphPipelineResourceInfoARM const & ) const = default;
#else
    // member-wise equality; note that pNext is compared by address, not by pointee
    bool operator==( DataGraphPipelineResourceInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSet == rhs.descriptorSet ) && ( binding == rhs.binding ) &&
             ( arrayElement == rhs.arrayElement );
#  endif
    }

    bool operator!=( DataGraphPipelineResourceInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType         = StructureType::eDataGraphPipelineResourceInfoARM;
    const void *  pNext         = {};
    uint32_t      descriptorSet = {};
    uint32_t      binding       = {};
    uint32_t      arrayElement  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type; only provided for C++20 and newer builds.
  template <>
  struct CppType<VkDataGraphPipelineResourceInfoARM>
  {
    using Type = DataGraphPipelineResourceInfoARM;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type, used by generic structure-chain code.
  template <>
  struct CppType<StructureType, StructureType::eDataGraphPipelineResourceInfoARM>
  {
    using Type = DataGraphPipelineResourceInfoARM;
  };

  // wrapper struct for struct VkDataGraphPipelineCreateInfoARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineCreateInfoARM.html
  struct DataGraphPipelineCreateInfoARM
  {
    // the corresponding C API struct; this wrapper is layout-compatible with it, which makes the
    // reinterpret_cast-based conversions below valid
    using NativeType = VkDataGraphPipelineCreateInfoARM;

    // whether this structure may appear more than once in a structure chain
    static const bool                                  allowDuplicate = false;
    // the StructureType value that sType is initialized to for this struct
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDataGraphPipelineCreateInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; all arguments are defaulted, so this also acts as the default constructor
    VULKAN_HPP_CONSTEXPR DataGraphPipelineCreateInfoARM( PipelineCreateFlags2KHR                  flags_             = {},
                                                         PipelineLayout                           layout_            = {},
                                                         uint32_t                                 resourceInfoCount_ = {},
                                                         const DataGraphPipelineResourceInfoARM * pResourceInfos_    = {},
                                                         const void *                             pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , layout{ layout_ }
      , resourceInfoCount{ resourceInfoCount_ }
      , pResourceInfos{ pResourceInfos_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DataGraphPipelineCreateInfoARM( DataGraphPipelineCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the equivalent C API struct (copies member-wise via the layout-compatible wrapper)
    DataGraphPipelineCreateInfoARM( VkDataGraphPipelineCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : DataGraphPipelineCreateInfoARM( *reinterpret_cast<DataGraphPipelineCreateInfoARM const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // convenience constructor: takes the resource infos as an array proxy and derives
    // resourceInfoCount/pResourceInfos from it; the caller must keep the array alive
    DataGraphPipelineCreateInfoARM( PipelineCreateFlags2KHR                                                 flags_,
                                    PipelineLayout                                                          layout_,
                                    ArrayProxyNoTemporaries<const DataGraphPipelineResourceInfoARM> const & resourceInfos_,
                                    const void *                                                            pNext_ = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , layout( layout_ )
      , resourceInfoCount( static_cast<uint32_t>( resourceInfos_.size() ) )
      , pResourceInfos( resourceInfos_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DataGraphPipelineCreateInfoARM & operator=( DataGraphPipelineCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the equivalent C API struct
    DataGraphPipelineCreateInfoARM & operator=( VkDataGraphPipelineCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DataGraphPipelineCreateInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineCreateInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineCreateInfoARM & setFlags( PipelineCreateFlags2KHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineCreateInfoARM & setLayout( PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
    {
      layout = layout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineCreateInfoARM & setResourceInfoCount( uint32_t resourceInfoCount_ ) VULKAN_HPP_NOEXCEPT
    {
      resourceInfoCount = resourceInfoCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineCreateInfoARM & setPResourceInfos( const DataGraphPipelineResourceInfoARM * pResourceInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      pResourceInfos = pResourceInfos_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // convenience setter: sets both resourceInfoCount and pResourceInfos from an array proxy;
    // the caller must keep the array alive
    DataGraphPipelineCreateInfoARM &
      setResourceInfos( ArrayProxyNoTemporaries<const DataGraphPipelineResourceInfoARM> const & resourceInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      resourceInfoCount = static_cast<uint32_t>( resourceInfos_.size() );
      pResourceInfos    = resourceInfos_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the C API type, by reference and by pointer
    operator VkDataGraphPipelineCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDataGraphPipelineCreateInfoARM *>( this );
    }

    operator VkDataGraphPipelineCreateInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDataGraphPipelineCreateInfoARM *>( this );
    }

    operator VkDataGraphPipelineCreateInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDataGraphPipelineCreateInfoARM *>( this );
    }

    operator VkDataGraphPipelineCreateInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDataGraphPipelineCreateInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // returns a tuple of references to all members, used for member-wise comparison below
    std::tuple<StructureType const &,
               const void * const &,
               PipelineCreateFlags2KHR const &,
               PipelineLayout const &,
               uint32_t const &,
               const DataGraphPipelineResourceInfoARM * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, layout, resourceInfoCount, pResourceInfos );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DataGraphPipelineCreateInfoARM const & ) const = default;
#else
    // member-wise equality; note that pointer members are compared by address, not by pointee
    bool operator==( DataGraphPipelineCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( layout == rhs.layout ) &&
             ( resourceInfoCount == rhs.resourceInfoCount ) && ( pResourceInfos == rhs.pResourceInfos );
#  endif
    }

    bool operator!=( DataGraphPipelineCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                            sType             = StructureType::eDataGraphPipelineCreateInfoARM;
    const void *                             pNext             = {};
    PipelineCreateFlags2KHR                  flags             = {};
    PipelineLayout                           layout            = {};
    uint32_t                                 resourceInfoCount = {};
    const DataGraphPipelineResourceInfoARM * pResourceInfos    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type; only provided for C++20 and newer builds.
  template <>
  struct CppType<VkDataGraphPipelineCreateInfoARM>
  {
    using Type = DataGraphPipelineCreateInfoARM;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type, used by generic structure-chain code.
  template <>
  struct CppType<StructureType, StructureType::eDataGraphPipelineCreateInfoARM>
  {
    using Type = DataGraphPipelineCreateInfoARM;
  };

  // wrapper struct for struct VkDataGraphPipelineDispatchInfoARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineDispatchInfoARM.html
  struct DataGraphPipelineDispatchInfoARM
  {
    // the corresponding C API struct; this wrapper is layout-compatible with it, which makes the
    // reinterpret_cast-based conversions below valid
    using NativeType = VkDataGraphPipelineDispatchInfoARM;

    // whether this structure may appear more than once in a structure chain
    static const bool                                  allowDuplicate = false;
    // the StructureType value that sType is initialized to for this struct
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDataGraphPipelineDispatchInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; all arguments are defaulted, so this also acts as the default constructor
    // (note: pNext is non-const void * here, matching the C API declaration)
    VULKAN_HPP_CONSTEXPR DataGraphPipelineDispatchInfoARM( DataGraphPipelineDispatchFlagsARM flags_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DataGraphPipelineDispatchInfoARM( DataGraphPipelineDispatchInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the equivalent C API struct (copies member-wise via the layout-compatible wrapper)
    DataGraphPipelineDispatchInfoARM( VkDataGraphPipelineDispatchInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : DataGraphPipelineDispatchInfoARM( *reinterpret_cast<DataGraphPipelineDispatchInfoARM const *>( &rhs ) )
    {
    }

    DataGraphPipelineDispatchInfoARM & operator=( DataGraphPipelineDispatchInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the equivalent C API struct
    DataGraphPipelineDispatchInfoARM & operator=( VkDataGraphPipelineDispatchInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DataGraphPipelineDispatchInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineDispatchInfoARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineDispatchInfoARM & setFlags( DataGraphPipelineDispatchFlagsARM flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the C API type, by reference and by pointer
    operator VkDataGraphPipelineDispatchInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDataGraphPipelineDispatchInfoARM *>( this );
    }

    operator VkDataGraphPipelineDispatchInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDataGraphPipelineDispatchInfoARM *>( this );
    }

    operator VkDataGraphPipelineDispatchInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDataGraphPipelineDispatchInfoARM *>( this );
    }

    operator VkDataGraphPipelineDispatchInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDataGraphPipelineDispatchInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // returns a tuple of references to all members, used for member-wise comparison below
    std::tuple<StructureType const &, void * const &, DataGraphPipelineDispatchFlagsARM const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DataGraphPipelineDispatchInfoARM const & ) const = default;
#else
    // member-wise equality; note that pNext is compared by address, not by pointee
    bool operator==( DataGraphPipelineDispatchInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
#  endif
    }

    bool operator!=( DataGraphPipelineDispatchInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                     sType = StructureType::eDataGraphPipelineDispatchInfoARM;
    void *                            pNext = {};
    DataGraphPipelineDispatchFlagsARM flags = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type; only provided for C++20 and newer builds.
  template <>
  struct CppType<VkDataGraphPipelineDispatchInfoARM>
  {
    using Type = DataGraphPipelineDispatchInfoARM;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type, used by generic structure-chain code.
  template <>
  struct CppType<StructureType, StructureType::eDataGraphPipelineDispatchInfoARM>
  {
    using Type = DataGraphPipelineDispatchInfoARM;
  };

  // wrapper struct for struct VkDataGraphPipelineIdentifierCreateInfoARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineIdentifierCreateInfoARM.html
  struct DataGraphPipelineIdentifierCreateInfoARM
  {
    // the corresponding C API struct; this wrapper is layout-compatible with it, which makes the
    // reinterpret_cast-based conversions below valid
    using NativeType = VkDataGraphPipelineIdentifierCreateInfoARM;

    // whether this structure may appear more than once in a structure chain
    static const bool                                  allowDuplicate = false;
    // the StructureType value that sType is initialized to for this struct
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDataGraphPipelineIdentifierCreateInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; all arguments are defaulted, so this also acts as the default constructor
    VULKAN_HPP_CONSTEXPR DataGraphPipelineIdentifierCreateInfoARM( uint32_t        identifierSize_ = {},
                                                                   const uint8_t * pIdentifier_    = {},
                                                                   const void *    pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , identifierSize{ identifierSize_ }
      , pIdentifier{ pIdentifier_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DataGraphPipelineIdentifierCreateInfoARM( DataGraphPipelineIdentifierCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the equivalent C API struct (copies member-wise via the layout-compatible wrapper)
    DataGraphPipelineIdentifierCreateInfoARM( VkDataGraphPipelineIdentifierCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : DataGraphPipelineIdentifierCreateInfoARM( *reinterpret_cast<DataGraphPipelineIdentifierCreateInfoARM const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // convenience constructor: derives identifierSize/pIdentifier from an array proxy;
    // the caller must keep the array alive
    DataGraphPipelineIdentifierCreateInfoARM( ArrayProxyNoTemporaries<const uint8_t> const & identifier_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), identifierSize( static_cast<uint32_t>( identifier_.size() ) ), pIdentifier( identifier_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DataGraphPipelineIdentifierCreateInfoARM & operator=( DataGraphPipelineIdentifierCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the equivalent C API struct
    DataGraphPipelineIdentifierCreateInfoARM & operator=( VkDataGraphPipelineIdentifierCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DataGraphPipelineIdentifierCreateInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineIdentifierCreateInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineIdentifierCreateInfoARM & setIdentifierSize( uint32_t identifierSize_ ) VULKAN_HPP_NOEXCEPT
    {
      identifierSize = identifierSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineIdentifierCreateInfoARM & setPIdentifier( const uint8_t * pIdentifier_ ) VULKAN_HPP_NOEXCEPT
    {
      pIdentifier = pIdentifier_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // convenience setter: sets both identifierSize and pIdentifier from an array proxy;
    // the caller must keep the array alive
    DataGraphPipelineIdentifierCreateInfoARM & setIdentifier( ArrayProxyNoTemporaries<const uint8_t> const & identifier_ ) VULKAN_HPP_NOEXCEPT
    {
      identifierSize = static_cast<uint32_t>( identifier_.size() );
      pIdentifier    = identifier_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the C API type, by reference and by pointer
    operator VkDataGraphPipelineIdentifierCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDataGraphPipelineIdentifierCreateInfoARM *>( this );
    }

    operator VkDataGraphPipelineIdentifierCreateInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDataGraphPipelineIdentifierCreateInfoARM *>( this );
    }

    operator VkDataGraphPipelineIdentifierCreateInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDataGraphPipelineIdentifierCreateInfoARM *>( this );
    }

    operator VkDataGraphPipelineIdentifierCreateInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDataGraphPipelineIdentifierCreateInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // returns a tuple of references to all members, used for member-wise comparison below
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const uint8_t * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, identifierSize, pIdentifier );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DataGraphPipelineIdentifierCreateInfoARM const & ) const = default;
#else
    // member-wise equality; note that pIdentifier is compared by address, not by pointed-to bytes
    bool operator==( DataGraphPipelineIdentifierCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( identifierSize == rhs.identifierSize ) && ( pIdentifier == rhs.pIdentifier );
#  endif
    }

    bool operator!=( DataGraphPipelineIdentifierCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType   sType          = StructureType::eDataGraphPipelineIdentifierCreateInfoARM;
    const void *    pNext          = {};
    uint32_t        identifierSize = {};
    const uint8_t * pIdentifier    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type; only provided for C++20 and newer builds.
  template <>
  struct CppType<VkDataGraphPipelineIdentifierCreateInfoARM>
  {
    using Type = DataGraphPipelineIdentifierCreateInfoARM;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type, used by generic structure-chain code.
  template <>
  struct CppType<StructureType, StructureType::eDataGraphPipelineIdentifierCreateInfoARM>
  {
    using Type = DataGraphPipelineIdentifierCreateInfoARM;
  };

  // wrapper struct for struct VkDataGraphPipelineInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineInfoARM.html
  struct DataGraphPipelineInfoARM
  {
    using NativeType = VkDataGraphPipelineInfoARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDataGraphPipelineInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DataGraphPipelineInfoARM( Pipeline dataGraphPipeline_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , dataGraphPipeline{ dataGraphPipeline_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DataGraphPipelineInfoARM( DataGraphPipelineInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-identical to VkDataGraphPipelineInfoARM.
    DataGraphPipelineInfoARM( VkDataGraphPipelineInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : DataGraphPipelineInfoARM( *reinterpret_cast<DataGraphPipelineInfoARM const *>( &rhs ) )
    {
    }

    DataGraphPipelineInfoARM & operator=( DataGraphPipelineInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reasoning as the converting constructor).
    DataGraphPipelineInfoARM & operator=( VkDataGraphPipelineInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DataGraphPipelineInfoARM const *>( &rhs );
      return *this;
    }

    // Chainable member setters (builder style).
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineInfoARM & setDataGraphPipeline( Pipeline dataGraphPipeline_ ) VULKAN_HPP_NOEXCEPT
    {
      dataGraphPipeline = dataGraphPipeline_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkDataGraphPipelineInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDataGraphPipelineInfoARM *>( this );
    }

    operator VkDataGraphPipelineInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDataGraphPipelineInfoARM *>( this );
    }

    operator VkDataGraphPipelineInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDataGraphPipelineInfoARM *>( this );
    }

    operator VkDataGraphPipelineInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDataGraphPipelineInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; enables reflection-based, member-wise comparison.
    std::tuple<StructureType const &, const void * const &, Pipeline const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, dataGraphPipeline );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DataGraphPipelineInfoARM const & ) const = default;
#else
    // Member-wise equality fallback when operator<=> is unavailable.
    bool operator==( DataGraphPipelineInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataGraphPipeline == rhs.dataGraphPipeline );
#  endif
    }

    bool operator!=( DataGraphPipelineInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType             = StructureType::eDataGraphPipelineInfoARM;
    const void *  pNext             = {};
    Pipeline      dataGraphPipeline = {};
  };

  // Maps the native C struct to its C++ wrapper type for generic type lookups (only provided for C++20 and newer).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDataGraphPipelineInfoARM>
  {
    using Type = DataGraphPipelineInfoARM;
  };
#endif

  // Maps the sType enumerant back to the C++ wrapper type (used by generic structure handling, e.g. structure chains).
  template <>
  struct CppType<StructureType, StructureType::eDataGraphPipelineInfoARM>
  {
    using Type = DataGraphPipelineInfoARM;
  };

  // wrapper struct for struct VkDataGraphPipelinePropertyQueryResultARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelinePropertyQueryResultARM.html
  struct DataGraphPipelinePropertyQueryResultARM
  {
    using NativeType = VkDataGraphPipelinePropertyQueryResultARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDataGraphPipelinePropertyQueryResultARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DataGraphPipelinePropertyQueryResultARM( DataGraphPipelinePropertyARM property_ = DataGraphPipelinePropertyARM::eCreationLog,
                                                                  Bool32                       isText_   = {},
                                                                  size_t                       dataSize_ = {},
                                                                  void *                       pData_    = {},
                                                                  const void *                 pNext_    = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , property{ property_ }
      , isText{ isText_ }
      , dataSize{ dataSize_ }
      , pData{ pData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DataGraphPipelinePropertyQueryResultARM( DataGraphPipelinePropertyQueryResultARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-identical to VkDataGraphPipelinePropertyQueryResultARM.
    DataGraphPipelinePropertyQueryResultARM( VkDataGraphPipelinePropertyQueryResultARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : DataGraphPipelinePropertyQueryResultARM( *reinterpret_cast<DataGraphPipelinePropertyQueryResultARM const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode convenience constructor: derives dataSize (in bytes) and pData from a typed array proxy.
    template <typename T>
    DataGraphPipelinePropertyQueryResultARM( DataGraphPipelinePropertyARM       property_,
                                             Bool32                             isText_,
                                             ArrayProxyNoTemporaries<T> const & data_,
                                             const void *                       pNext_ = nullptr )
      : pNext( pNext_ ), property( property_ ), isText( isText_ ), dataSize( data_.size() * sizeof( T ) ), pData( data_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DataGraphPipelinePropertyQueryResultARM & operator=( DataGraphPipelinePropertyQueryResultARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reasoning as the converting constructor).
    DataGraphPipelinePropertyQueryResultARM & operator=( VkDataGraphPipelinePropertyQueryResultARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DataGraphPipelinePropertyQueryResultARM const *>( &rhs );
      return *this;
    }

    // Chainable member setters (builder style).
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelinePropertyQueryResultARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelinePropertyQueryResultARM & setProperty( DataGraphPipelinePropertyARM property_ ) VULKAN_HPP_NOEXCEPT
    {
      property = property_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelinePropertyQueryResultARM & setIsText( Bool32 isText_ ) VULKAN_HPP_NOEXCEPT
    {
      isText = isText_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelinePropertyQueryResultARM & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
    {
      dataSize = dataSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelinePropertyQueryResultARM & setPData( void * pData_ ) VULKAN_HPP_NOEXCEPT
    {
      pData = pData_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: fills dataSize (in bytes) and pData from a typed array proxy in one call.
    template <typename T>
    DataGraphPipelinePropertyQueryResultARM & setData( ArrayProxyNoTemporaries<T> const & data_ ) VULKAN_HPP_NOEXCEPT
    {
      dataSize = data_.size() * sizeof( T );
      pData    = data_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkDataGraphPipelinePropertyQueryResultARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDataGraphPipelinePropertyQueryResultARM *>( this );
    }

    operator VkDataGraphPipelinePropertyQueryResultARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDataGraphPipelinePropertyQueryResultARM *>( this );
    }

    operator VkDataGraphPipelinePropertyQueryResultARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDataGraphPipelinePropertyQueryResultARM *>( this );
    }

    operator VkDataGraphPipelinePropertyQueryResultARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDataGraphPipelinePropertyQueryResultARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; enables reflection-based, member-wise comparison.
    std::tuple<StructureType const &, const void * const &, DataGraphPipelinePropertyARM const &, Bool32 const &, size_t const &, void * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, property, isText, dataSize, pData );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DataGraphPipelinePropertyQueryResultARM const & ) const = default;
#else
    // Member-wise equality fallback when operator<=> is unavailable. Note: pData is compared by pointer, not contents.
    bool operator==( DataGraphPipelinePropertyQueryResultARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( property == rhs.property ) && ( isText == rhs.isText ) && ( dataSize == rhs.dataSize ) &&
             ( pData == rhs.pData );
#  endif
    }

    bool operator!=( DataGraphPipelinePropertyQueryResultARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                sType    = StructureType::eDataGraphPipelinePropertyQueryResultARM;
    const void *                 pNext    = {};
    DataGraphPipelinePropertyARM property = DataGraphPipelinePropertyARM::eCreationLog;
    Bool32                       isText   = {};
    size_t                       dataSize = {};
    void *                       pData    = {};
  };

  // Maps the native C struct to its C++ wrapper type for generic type lookups (only provided for C++20 and newer).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDataGraphPipelinePropertyQueryResultARM>
  {
    using Type = DataGraphPipelinePropertyQueryResultARM;
  };
#endif

  // Maps the sType enumerant back to the C++ wrapper type (used by generic structure handling, e.g. structure chains).
  template <>
  struct CppType<StructureType, StructureType::eDataGraphPipelinePropertyQueryResultARM>
  {
    using Type = DataGraphPipelinePropertyQueryResultARM;
  };

  // wrapper struct for struct VkDataGraphPipelineSessionBindPointRequirementARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineSessionBindPointRequirementARM.html
  struct DataGraphPipelineSessionBindPointRequirementARM
  {
    using NativeType = VkDataGraphPipelineSessionBindPointRequirementARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDataGraphPipelineSessionBindPointRequirementARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DataGraphPipelineSessionBindPointRequirementARM(
      DataGraphPipelineSessionBindPointARM     bindPoint_     = DataGraphPipelineSessionBindPointARM::eTransient,
      DataGraphPipelineSessionBindPointTypeARM bindPointType_ = DataGraphPipelineSessionBindPointTypeARM::eMemory,
      uint32_t                                 numObjects_    = {},
      const void *                             pNext_         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , bindPoint{ bindPoint_ }
      , bindPointType{ bindPointType_ }
      , numObjects{ numObjects_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      DataGraphPipelineSessionBindPointRequirementARM( DataGraphPipelineSessionBindPointRequirementARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-identical to VkDataGraphPipelineSessionBindPointRequirementARM.
    DataGraphPipelineSessionBindPointRequirementARM( VkDataGraphPipelineSessionBindPointRequirementARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : DataGraphPipelineSessionBindPointRequirementARM( *reinterpret_cast<DataGraphPipelineSessionBindPointRequirementARM const *>( &rhs ) )
    {
    }

    DataGraphPipelineSessionBindPointRequirementARM & operator=( DataGraphPipelineSessionBindPointRequirementARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reasoning as the converting constructor).
    DataGraphPipelineSessionBindPointRequirementARM & operator=( VkDataGraphPipelineSessionBindPointRequirementARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DataGraphPipelineSessionBindPointRequirementARM const *>( &rhs );
      return *this;
    }

    // Chainable member setters (builder style).
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionBindPointRequirementARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionBindPointRequirementARM &
      setBindPoint( DataGraphPipelineSessionBindPointARM bindPoint_ ) VULKAN_HPP_NOEXCEPT
    {
      bindPoint = bindPoint_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionBindPointRequirementARM &
      setBindPointType( DataGraphPipelineSessionBindPointTypeARM bindPointType_ ) VULKAN_HPP_NOEXCEPT
    {
      bindPointType = bindPointType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionBindPointRequirementARM & setNumObjects( uint32_t numObjects_ ) VULKAN_HPP_NOEXCEPT
    {
      numObjects = numObjects_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkDataGraphPipelineSessionBindPointRequirementARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDataGraphPipelineSessionBindPointRequirementARM *>( this );
    }

    operator VkDataGraphPipelineSessionBindPointRequirementARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDataGraphPipelineSessionBindPointRequirementARM *>( this );
    }

    operator VkDataGraphPipelineSessionBindPointRequirementARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDataGraphPipelineSessionBindPointRequirementARM *>( this );
    }

    operator VkDataGraphPipelineSessionBindPointRequirementARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDataGraphPipelineSessionBindPointRequirementARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; enables reflection-based, member-wise comparison.
    std::tuple<StructureType const &,
               const void * const &,
               DataGraphPipelineSessionBindPointARM const &,
               DataGraphPipelineSessionBindPointTypeARM const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, bindPoint, bindPointType, numObjects );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DataGraphPipelineSessionBindPointRequirementARM const & ) const = default;
#else
    // Member-wise equality fallback when operator<=> is unavailable.
    bool operator==( DataGraphPipelineSessionBindPointRequirementARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bindPoint == rhs.bindPoint ) && ( bindPointType == rhs.bindPointType ) &&
             ( numObjects == rhs.numObjects );
#  endif
    }

    bool operator!=( DataGraphPipelineSessionBindPointRequirementARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                            sType         = StructureType::eDataGraphPipelineSessionBindPointRequirementARM;
    const void *                             pNext         = {};
    DataGraphPipelineSessionBindPointARM     bindPoint     = DataGraphPipelineSessionBindPointARM::eTransient;
    DataGraphPipelineSessionBindPointTypeARM bindPointType = DataGraphPipelineSessionBindPointTypeARM::eMemory;
    uint32_t                                 numObjects    = {};
  };

  // Maps the native C struct to its C++ wrapper type for generic type lookups (only provided for C++20 and newer).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDataGraphPipelineSessionBindPointRequirementARM>
  {
    using Type = DataGraphPipelineSessionBindPointRequirementARM;
  };
#endif

  // Maps the sType enumerant back to the C++ wrapper type (used by generic structure handling, e.g. structure chains).
  template <>
  struct CppType<StructureType, StructureType::eDataGraphPipelineSessionBindPointRequirementARM>
  {
    using Type = DataGraphPipelineSessionBindPointRequirementARM;
  };

  // wrapper struct for struct VkDataGraphPipelineSessionBindPointRequirementsInfoARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineSessionBindPointRequirementsInfoARM.html
  struct DataGraphPipelineSessionBindPointRequirementsInfoARM
  {
    using NativeType = VkDataGraphPipelineSessionBindPointRequirementsInfoARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDataGraphPipelineSessionBindPointRequirementsInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DataGraphPipelineSessionBindPointRequirementsInfoARM( DataGraphPipelineSessionARM session_ = {},
                                                                               const void *                pNext_   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , session{ session_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      DataGraphPipelineSessionBindPointRequirementsInfoARM( DataGraphPipelineSessionBindPointRequirementsInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-identical to VkDataGraphPipelineSessionBindPointRequirementsInfoARM.
    DataGraphPipelineSessionBindPointRequirementsInfoARM( VkDataGraphPipelineSessionBindPointRequirementsInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : DataGraphPipelineSessionBindPointRequirementsInfoARM( *reinterpret_cast<DataGraphPipelineSessionBindPointRequirementsInfoARM const *>( &rhs ) )
    {
    }

    DataGraphPipelineSessionBindPointRequirementsInfoARM &
      operator=( DataGraphPipelineSessionBindPointRequirementsInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reasoning as the converting constructor).
    DataGraphPipelineSessionBindPointRequirementsInfoARM & operator=( VkDataGraphPipelineSessionBindPointRequirementsInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DataGraphPipelineSessionBindPointRequirementsInfoARM const *>( &rhs );
      return *this;
    }

    // Chainable member setters (builder style).
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionBindPointRequirementsInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionBindPointRequirementsInfoARM & setSession( DataGraphPipelineSessionARM session_ ) VULKAN_HPP_NOEXCEPT
    {
      session = session_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkDataGraphPipelineSessionBindPointRequirementsInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDataGraphPipelineSessionBindPointRequirementsInfoARM *>( this );
    }

    operator VkDataGraphPipelineSessionBindPointRequirementsInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDataGraphPipelineSessionBindPointRequirementsInfoARM *>( this );
    }

    operator VkDataGraphPipelineSessionBindPointRequirementsInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDataGraphPipelineSessionBindPointRequirementsInfoARM *>( this );
    }

    operator VkDataGraphPipelineSessionBindPointRequirementsInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDataGraphPipelineSessionBindPointRequirementsInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; enables reflection-based, member-wise comparison.
    std::tuple<StructureType const &, const void * const &, DataGraphPipelineSessionARM const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, session );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DataGraphPipelineSessionBindPointRequirementsInfoARM const & ) const = default;
#else
    // Member-wise equality fallback when operator<=> is unavailable.
    bool operator==( DataGraphPipelineSessionBindPointRequirementsInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( session == rhs.session );
#  endif
    }

    bool operator!=( DataGraphPipelineSessionBindPointRequirementsInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType               sType   = StructureType::eDataGraphPipelineSessionBindPointRequirementsInfoARM;
    const void *                pNext   = {};
    DataGraphPipelineSessionARM session = {};
  };

  // Maps the native C struct to its C++ wrapper type for generic type lookups (only provided for C++20 and newer).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDataGraphPipelineSessionBindPointRequirementsInfoARM>
  {
    using Type = DataGraphPipelineSessionBindPointRequirementsInfoARM;
  };
#endif

  // Maps the sType enumerant back to the C++ wrapper type (used by generic structure handling, e.g. structure chains).
  template <>
  struct CppType<StructureType, StructureType::eDataGraphPipelineSessionBindPointRequirementsInfoARM>
  {
    using Type = DataGraphPipelineSessionBindPointRequirementsInfoARM;
  };

  // wrapper struct for struct VkDataGraphPipelineSessionCreateInfoARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineSessionCreateInfoARM.html
  struct DataGraphPipelineSessionCreateInfoARM
  {
    using NativeType = VkDataGraphPipelineSessionCreateInfoARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDataGraphPipelineSessionCreateInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DataGraphPipelineSessionCreateInfoARM( DataGraphPipelineSessionCreateFlagsARM flags_             = {},
                                                                Pipeline                               dataGraphPipeline_ = {},
                                                                const void *                           pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , dataGraphPipeline{ dataGraphPipeline_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DataGraphPipelineSessionCreateInfoARM( DataGraphPipelineSessionCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-identical to VkDataGraphPipelineSessionCreateInfoARM.
    DataGraphPipelineSessionCreateInfoARM( VkDataGraphPipelineSessionCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : DataGraphPipelineSessionCreateInfoARM( *reinterpret_cast<DataGraphPipelineSessionCreateInfoARM const *>( &rhs ) )
    {
    }

    DataGraphPipelineSessionCreateInfoARM & operator=( DataGraphPipelineSessionCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reasoning as the converting constructor).
    DataGraphPipelineSessionCreateInfoARM & operator=( VkDataGraphPipelineSessionCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DataGraphPipelineSessionCreateInfoARM const *>( &rhs );
      return *this;
    }

    // Chainable member setters (builder style).
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionCreateInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionCreateInfoARM & setFlags( DataGraphPipelineSessionCreateFlagsARM flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionCreateInfoARM & setDataGraphPipeline( Pipeline dataGraphPipeline_ ) VULKAN_HPP_NOEXCEPT
    {
      dataGraphPipeline = dataGraphPipeline_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkDataGraphPipelineSessionCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDataGraphPipelineSessionCreateInfoARM *>( this );
    }

    operator VkDataGraphPipelineSessionCreateInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDataGraphPipelineSessionCreateInfoARM *>( this );
    }

    operator VkDataGraphPipelineSessionCreateInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDataGraphPipelineSessionCreateInfoARM *>( this );
    }

    operator VkDataGraphPipelineSessionCreateInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDataGraphPipelineSessionCreateInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; enables reflection-based, member-wise comparison.
    std::tuple<StructureType const &, const void * const &, DataGraphPipelineSessionCreateFlagsARM const &, Pipeline const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, dataGraphPipeline );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DataGraphPipelineSessionCreateInfoARM const & ) const = default;
#else
    // Member-wise equality fallback when operator<=> is unavailable.
    bool operator==( DataGraphPipelineSessionCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dataGraphPipeline == rhs.dataGraphPipeline );
#  endif
    }

    bool operator!=( DataGraphPipelineSessionCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                          sType             = StructureType::eDataGraphPipelineSessionCreateInfoARM;
    const void *                           pNext             = {};
    DataGraphPipelineSessionCreateFlagsARM flags             = {};
    Pipeline                               dataGraphPipeline = {};
  };

  // Maps the native C struct to its C++ wrapper type for generic type lookups (only provided for C++20 and newer).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDataGraphPipelineSessionCreateInfoARM>
  {
    using Type = DataGraphPipelineSessionCreateInfoARM;
  };
#endif

  // Maps the sType enumerant back to the C++ wrapper type (used by generic structure handling, e.g. structure chains).
  template <>
  struct CppType<StructureType, StructureType::eDataGraphPipelineSessionCreateInfoARM>
  {
    using Type = DataGraphPipelineSessionCreateInfoARM;
  };

  // wrapper struct for struct VkDataGraphPipelineSessionMemoryRequirementsInfoARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineSessionMemoryRequirementsInfoARM.html
  struct DataGraphPipelineSessionMemoryRequirementsInfoARM
  {
    using NativeType = VkDataGraphPipelineSessionMemoryRequirementsInfoARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDataGraphPipelineSessionMemoryRequirementsInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DataGraphPipelineSessionMemoryRequirementsInfoARM( DataGraphPipelineSessionARM          session_     = {},
                                                         DataGraphPipelineSessionBindPointARM bindPoint_   = DataGraphPipelineSessionBindPointARM::eTransient,
                                                         uint32_t                             objectIndex_ = {},
                                                         const void *                         pNext_       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , session{ session_ }
      , bindPoint{ bindPoint_ }
      , objectIndex{ objectIndex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      DataGraphPipelineSessionMemoryRequirementsInfoARM( DataGraphPipelineSessionMemoryRequirementsInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-identical to VkDataGraphPipelineSessionMemoryRequirementsInfoARM.
    DataGraphPipelineSessionMemoryRequirementsInfoARM( VkDataGraphPipelineSessionMemoryRequirementsInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : DataGraphPipelineSessionMemoryRequirementsInfoARM( *reinterpret_cast<DataGraphPipelineSessionMemoryRequirementsInfoARM const *>( &rhs ) )
    {
    }

    DataGraphPipelineSessionMemoryRequirementsInfoARM &
      operator=( DataGraphPipelineSessionMemoryRequirementsInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reasoning as the converting constructor).
    DataGraphPipelineSessionMemoryRequirementsInfoARM & operator=( VkDataGraphPipelineSessionMemoryRequirementsInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DataGraphPipelineSessionMemoryRequirementsInfoARM const *>( &rhs );
      return *this;
    }

    // Chainable member setters (builder style).
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionMemoryRequirementsInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionMemoryRequirementsInfoARM & setSession( DataGraphPipelineSessionARM session_ ) VULKAN_HPP_NOEXCEPT
    {
      session = session_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionMemoryRequirementsInfoARM &
      setBindPoint( DataGraphPipelineSessionBindPointARM bindPoint_ ) VULKAN_HPP_NOEXCEPT
    {
      bindPoint = bindPoint_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionMemoryRequirementsInfoARM & setObjectIndex( uint32_t objectIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      objectIndex = objectIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkDataGraphPipelineSessionMemoryRequirementsInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDataGraphPipelineSessionMemoryRequirementsInfoARM *>( this );
    }

    operator VkDataGraphPipelineSessionMemoryRequirementsInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDataGraphPipelineSessionMemoryRequirementsInfoARM *>( this );
    }

    operator VkDataGraphPipelineSessionMemoryRequirementsInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDataGraphPipelineSessionMemoryRequirementsInfoARM *>( this );
    }

    operator VkDataGraphPipelineSessionMemoryRequirementsInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDataGraphPipelineSessionMemoryRequirementsInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; enables reflection-based, member-wise comparison.
    std::tuple<StructureType const &, const void * const &, DataGraphPipelineSessionARM const &, DataGraphPipelineSessionBindPointARM const &, uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, session, bindPoint, objectIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DataGraphPipelineSessionMemoryRequirementsInfoARM const & ) const = default;
#else
    // Member-wise equality fallback when operator<=> is unavailable.
    bool operator==( DataGraphPipelineSessionMemoryRequirementsInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( session == rhs.session ) && ( bindPoint == rhs.bindPoint ) &&
             ( objectIndex == rhs.objectIndex );
#  endif
    }

    bool operator!=( DataGraphPipelineSessionMemoryRequirementsInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                        sType       = StructureType::eDataGraphPipelineSessionMemoryRequirementsInfoARM;
    const void *                         pNext       = {};
    DataGraphPipelineSessionARM          session     = {};
    DataGraphPipelineSessionBindPointARM bindPoint   = DataGraphPipelineSessionBindPointARM::eTransient;
    uint32_t                             objectIndex = {};
  };

  // Maps the native C struct to its C++ wrapper type for generic type lookups (only provided for C++20 and newer).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDataGraphPipelineSessionMemoryRequirementsInfoARM>
  {
    using Type = DataGraphPipelineSessionMemoryRequirementsInfoARM;
  };
#endif

  // Maps the sType enumerant back to the C++ wrapper type (used by generic structure handling, e.g. structure chains).
  template <>
  struct CppType<StructureType, StructureType::eDataGraphPipelineSessionMemoryRequirementsInfoARM>
  {
    using Type = DataGraphPipelineSessionMemoryRequirementsInfoARM;
  };

  // wrapper struct for struct VkDataGraphPipelineShaderModuleCreateInfoARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineShaderModuleCreateInfoARM.html
  struct DataGraphPipelineShaderModuleCreateInfoARM
  {
    using NativeType = VkDataGraphPipelineShaderModuleCreateInfoARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDataGraphPipelineShaderModuleCreateInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // element-wise constructor; every member defaults to a zero-initialized value (pName defaults to nullptr)
    VULKAN_HPP_CONSTEXPR DataGraphPipelineShaderModuleCreateInfoARM( ShaderModule                         module_              = {},
                                                                     const char *                         pName_               = {},
                                                                     const SpecializationInfo *           pSpecializationInfo_ = {},
                                                                     uint32_t                             constantCount_       = {},
                                                                     const DataGraphPipelineConstantARM * pConstants_          = {},
                                                                     const void *                         pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , module{ module_ }
      , pName{ pName_ }
      , pSpecializationInfo{ pSpecializationInfo_ }
      , constantCount{ constantCount_ }
      , pConstants{ pConstants_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DataGraphPipelineShaderModuleCreateInfoARM( DataGraphPipelineShaderModuleCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native struct; valid because the wrapper is layout-compatible with the native type
    DataGraphPipelineShaderModuleCreateInfoARM( VkDataGraphPipelineShaderModuleCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : DataGraphPipelineShaderModuleCreateInfoARM( *reinterpret_cast<DataGraphPipelineShaderModuleCreateInfoARM const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // enhanced constructor: derives constantCount and pConstants from an array proxy in one step
    DataGraphPipelineShaderModuleCreateInfoARM( ShaderModule                                                        module_,
                                                const char *                                                        pName_,
                                                const SpecializationInfo *                                          pSpecializationInfo_,
                                                ArrayProxyNoTemporaries<const DataGraphPipelineConstantARM> const & constants_,
                                                const void *                                                        pNext_ = nullptr )
      : pNext( pNext_ )
      , module( module_ )
      , pName( pName_ )
      , pSpecializationInfo( pSpecializationInfo_ )
      , constantCount( static_cast<uint32_t>( constants_.size() ) )
      , pConstants( constants_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DataGraphPipelineShaderModuleCreateInfoARM & operator=( DataGraphPipelineShaderModuleCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native struct (layout-compatible, see the converting constructor above)
    DataGraphPipelineShaderModuleCreateInfoARM & operator=( VkDataGraphPipelineShaderModuleCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DataGraphPipelineShaderModuleCreateInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable member setters
    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineShaderModuleCreateInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineShaderModuleCreateInfoARM & setModule( ShaderModule module_ ) VULKAN_HPP_NOEXCEPT
    {
      module = module_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineShaderModuleCreateInfoARM & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT
    {
      pName = pName_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineShaderModuleCreateInfoARM &
      setPSpecializationInfo( const SpecializationInfo * pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pSpecializationInfo = pSpecializationInfo_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineShaderModuleCreateInfoARM & setConstantCount( uint32_t constantCount_ ) VULKAN_HPP_NOEXCEPT
    {
      constantCount = constantCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineShaderModuleCreateInfoARM & setPConstants( const DataGraphPipelineConstantARM * pConstants_ ) VULKAN_HPP_NOEXCEPT
    {
      pConstants = pConstants_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // sets constantCount and pConstants together from an array proxy
    DataGraphPipelineShaderModuleCreateInfoARM &
      setConstants( ArrayProxyNoTemporaries<const DataGraphPipelineConstantARM> const & constants_ ) VULKAN_HPP_NOEXCEPT
    {
      constantCount = static_cast<uint32_t>( constants_.size() );
      pConstants    = constants_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // zero-cost conversions to the native struct (reference and pointer, const and non-const)
    operator VkDataGraphPipelineShaderModuleCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDataGraphPipelineShaderModuleCreateInfoARM *>( this );
    }

    operator VkDataGraphPipelineShaderModuleCreateInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDataGraphPipelineShaderModuleCreateInfoARM *>( this );
    }

    operator VkDataGraphPipelineShaderModuleCreateInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDataGraphPipelineShaderModuleCreateInfoARM *>( this );
    }

    operator VkDataGraphPipelineShaderModuleCreateInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDataGraphPipelineShaderModuleCreateInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // returns a tuple of references to all members, in declaration order
    std::tuple<StructureType const &,
               const void * const &,
               ShaderModule const &,
               const char * const &,
               const SpecializationInfo * const &,
               uint32_t const &,
               const DataGraphPipelineConstantARM * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, module, pName, pSpecializationInfo, constantCount, pConstants );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // member-wise ordering; pName is compared by string content, not by pointer
    std::strong_ordering operator<=>( DataGraphPipelineShaderModuleCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = module <=> rhs.module; cmp != 0 )
        return cmp;
      if ( pName != rhs.pName )
      {
        // strcmp on a null pointer is undefined behavior; a null name orders before any non-null one
        if ( !pName || !rhs.pName )
          return !pName ? std::strong_ordering::less : std::strong_ordering::greater;
        if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 )
          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      }
      if ( auto cmp = pSpecializationInfo <=> rhs.pSpecializationInfo; cmp != 0 )
        return cmp;
      if ( auto cmp = constantCount <=> rhs.constantCount; cmp != 0 )
        return cmp;
      if ( auto cmp = pConstants <=> rhs.pConstants; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    // member-wise equality; pName compares equal if both pointers are identical (including both null)
    // or both are non-null and the strings match
    bool operator==( DataGraphPipelineShaderModuleCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( module == rhs.module ) &&
             ( ( pName == rhs.pName ) || ( pName && rhs.pName && ( strcmp( pName, rhs.pName ) == 0 ) ) ) &&
             ( pSpecializationInfo == rhs.pSpecializationInfo ) && ( constantCount == rhs.constantCount ) && ( pConstants == rhs.pConstants );
    }

    bool operator!=( DataGraphPipelineShaderModuleCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    StructureType                        sType               = StructureType::eDataGraphPipelineShaderModuleCreateInfoARM;
    const void *                         pNext               = {};
    ShaderModule                         module              = {};
    const char *                         pName               = {};
    const SpecializationInfo *           pSpecializationInfo = {};
    uint32_t                             constantCount       = {};
    const DataGraphPipelineConstantARM * pConstants          = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native Vulkan struct to its C++ wrapper type for template-based lookups (C++20 and up only)
  template <>
  struct CppType<VkDataGraphPipelineShaderModuleCreateInfoARM>
  {
    using Type = DataGraphPipelineShaderModuleCreateInfoARM;
  };
#endif

  // maps the StructureType enumerant back to the wrapper type carrying that sType
  template <>
  struct CppType<StructureType, StructureType::eDataGraphPipelineShaderModuleCreateInfoARM>
  {
    using Type = DataGraphPipelineShaderModuleCreateInfoARM;
  };

  // wrapper struct for struct VkPhysicalDeviceDataGraphProcessingEngineARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDataGraphProcessingEngineARM.html
  // NOTE: unlike most Vulkan structs this one has no sType/pNext members; it is a plain data struct
  // whose layout must match the native type exactly, which is what makes the reinterpret_cast
  // conversions below valid.
  struct PhysicalDeviceDataGraphProcessingEngineARM
  {
    using NativeType = VkPhysicalDeviceDataGraphProcessingEngineARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // element-wise constructor; type defaults to eDefault, isForeign to VK_FALSE
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDataGraphProcessingEngineARM(
      PhysicalDeviceDataGraphProcessingEngineTypeARM type_      = PhysicalDeviceDataGraphProcessingEngineTypeARM::eDefault,
      Bool32                                         isForeign_ = {} ) VULKAN_HPP_NOEXCEPT
      : type{ type_ }
      , isForeign{ isForeign_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDataGraphProcessingEngineARM( PhysicalDeviceDataGraphProcessingEngineARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native struct; valid because the wrapper is layout-compatible with the native type
    PhysicalDeviceDataGraphProcessingEngineARM( VkPhysicalDeviceDataGraphProcessingEngineARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDataGraphProcessingEngineARM( *reinterpret_cast<PhysicalDeviceDataGraphProcessingEngineARM const *>( &rhs ) )
    {
    }

    PhysicalDeviceDataGraphProcessingEngineARM & operator=( PhysicalDeviceDataGraphProcessingEngineARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native struct (layout-compatible, see the converting constructor above)
    PhysicalDeviceDataGraphProcessingEngineARM & operator=( VkPhysicalDeviceDataGraphProcessingEngineARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDataGraphProcessingEngineARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable member setters
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphProcessingEngineARM & setType( PhysicalDeviceDataGraphProcessingEngineTypeARM type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphProcessingEngineARM & setIsForeign( Bool32 isForeign_ ) VULKAN_HPP_NOEXCEPT
    {
      isForeign = isForeign_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // zero-cost conversions to the native struct (reference and pointer, const and non-const)
    operator VkPhysicalDeviceDataGraphProcessingEngineARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDataGraphProcessingEngineARM *>( this );
    }

    operator VkPhysicalDeviceDataGraphProcessingEngineARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDataGraphProcessingEngineARM *>( this );
    }

    operator VkPhysicalDeviceDataGraphProcessingEngineARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDataGraphProcessingEngineARM *>( this );
    }

    operator VkPhysicalDeviceDataGraphProcessingEngineARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDataGraphProcessingEngineARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // returns a tuple of references to all members, in declaration order
    std::tuple<PhysicalDeviceDataGraphProcessingEngineTypeARM const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( type, isForeign );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // defaulted member-wise comparisons when three-way comparison is available
    auto operator<=>( PhysicalDeviceDataGraphProcessingEngineARM const & ) const = default;
#else
    bool operator==( PhysicalDeviceDataGraphProcessingEngineARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( type == rhs.type ) && ( isForeign == rhs.isForeign );
#  endif
    }

    bool operator!=( PhysicalDeviceDataGraphProcessingEngineARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    PhysicalDeviceDataGraphProcessingEngineTypeARM type      = PhysicalDeviceDataGraphProcessingEngineTypeARM::eDefault;
    Bool32                                         isForeign = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native Vulkan struct to its C++ wrapper type for template-based lookups (C++20 and up only)
  // (no StructureType mapping here: this struct has no sType member)
  template <>
  struct CppType<VkPhysicalDeviceDataGraphProcessingEngineARM>
  {
    using Type = PhysicalDeviceDataGraphProcessingEngineARM;
  };
#endif

  // wrapper struct for struct VkDataGraphProcessingEngineCreateInfoARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphProcessingEngineCreateInfoARM.html
  struct DataGraphProcessingEngineCreateInfoARM
  {
    using NativeType = VkDataGraphProcessingEngineCreateInfoARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDataGraphProcessingEngineCreateInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // element-wise constructor; all members default to zero-initialized values
    VULKAN_HPP_CONSTEXPR DataGraphProcessingEngineCreateInfoARM( uint32_t                                     processingEngineCount_ = {},
                                                                 PhysicalDeviceDataGraphProcessingEngineARM * pProcessingEngines_    = {},
                                                                 const void *                                 pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , processingEngineCount{ processingEngineCount_ }
      , pProcessingEngines{ pProcessingEngines_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DataGraphProcessingEngineCreateInfoARM( DataGraphProcessingEngineCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native struct; valid because the wrapper is layout-compatible with the native type
    DataGraphProcessingEngineCreateInfoARM( VkDataGraphProcessingEngineCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : DataGraphProcessingEngineCreateInfoARM( *reinterpret_cast<DataGraphProcessingEngineCreateInfoARM const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // enhanced constructor: derives processingEngineCount and pProcessingEngines from an array proxy
    DataGraphProcessingEngineCreateInfoARM( ArrayProxyNoTemporaries<PhysicalDeviceDataGraphProcessingEngineARM> const & processingEngines_,
                                            const void *                                                                pNext_ = nullptr )
      : pNext( pNext_ ), processingEngineCount( static_cast<uint32_t>( processingEngines_.size() ) ), pProcessingEngines( processingEngines_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DataGraphProcessingEngineCreateInfoARM & operator=( DataGraphProcessingEngineCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native struct (layout-compatible, see the converting constructor above)
    DataGraphProcessingEngineCreateInfoARM & operator=( VkDataGraphProcessingEngineCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DataGraphProcessingEngineCreateInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable member setters
    VULKAN_HPP_CONSTEXPR_14 DataGraphProcessingEngineCreateInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphProcessingEngineCreateInfoARM & setProcessingEngineCount( uint32_t processingEngineCount_ ) VULKAN_HPP_NOEXCEPT
    {
      processingEngineCount = processingEngineCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DataGraphProcessingEngineCreateInfoARM &
      setPProcessingEngines( PhysicalDeviceDataGraphProcessingEngineARM * pProcessingEngines_ ) VULKAN_HPP_NOEXCEPT
    {
      pProcessingEngines = pProcessingEngines_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // sets processingEngineCount and pProcessingEngines together from an array proxy
    DataGraphProcessingEngineCreateInfoARM &
      setProcessingEngines( ArrayProxyNoTemporaries<PhysicalDeviceDataGraphProcessingEngineARM> const & processingEngines_ ) VULKAN_HPP_NOEXCEPT
    {
      processingEngineCount = static_cast<uint32_t>( processingEngines_.size() );
      pProcessingEngines    = processingEngines_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // zero-cost conversions to the native struct (reference and pointer, const and non-const)
    operator VkDataGraphProcessingEngineCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDataGraphProcessingEngineCreateInfoARM *>( this );
    }

    operator VkDataGraphProcessingEngineCreateInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDataGraphProcessingEngineCreateInfoARM *>( this );
    }

    operator VkDataGraphProcessingEngineCreateInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDataGraphProcessingEngineCreateInfoARM *>( this );
    }

    operator VkDataGraphProcessingEngineCreateInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDataGraphProcessingEngineCreateInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // returns a tuple of references to all members, in declaration order
    std::tuple<StructureType const &, const void * const &, uint32_t const &, PhysicalDeviceDataGraphProcessingEngineARM * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, processingEngineCount, pProcessingEngines );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // defaulted member-wise comparisons; pProcessingEngines compares by pointer, not by pointee
    auto operator<=>( DataGraphProcessingEngineCreateInfoARM const & ) const = default;
#else
    bool operator==( DataGraphProcessingEngineCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( processingEngineCount == rhs.processingEngineCount ) &&
             ( pProcessingEngines == rhs.pProcessingEngines );
#  endif
    }

    bool operator!=( DataGraphProcessingEngineCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                                sType                 = StructureType::eDataGraphProcessingEngineCreateInfoARM;
    const void *                                 pNext                 = {};
    uint32_t                                     processingEngineCount = {};
    PhysicalDeviceDataGraphProcessingEngineARM * pProcessingEngines    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native Vulkan struct to its C++ wrapper type for template-based lookups (C++20 and up only)
  template <>
  struct CppType<VkDataGraphProcessingEngineCreateInfoARM>
  {
    using Type = DataGraphProcessingEngineCreateInfoARM;
  };
#endif

  // maps the StructureType enumerant back to the wrapper type carrying that sType
  template <>
  struct CppType<StructureType, StructureType::eDataGraphProcessingEngineCreateInfoARM>
  {
    using Type = DataGraphProcessingEngineCreateInfoARM;
  };

  // wrapper struct for struct VkDebugMarkerMarkerInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugMarkerMarkerInfoEXT.html
  struct DebugMarkerMarkerInfoEXT
  {
    using NativeType = VkDebugMarkerMarkerInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDebugMarkerMarkerInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // element-wise constructor; pMarkerName defaults to nullptr, color to all zeros
    VULKAN_HPP_CONSTEXPR_14
      DebugMarkerMarkerInfoEXT( const char * pMarkerName_ = {}, std::array<float, 4> const & color_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pMarkerName{ pMarkerName_ }
      , color{ color_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native struct; valid because the wrapper is layout-compatible with the native type
    DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DebugMarkerMarkerInfoEXT( *reinterpret_cast<DebugMarkerMarkerInfoEXT const *>( &rhs ) )
    {
    }

    DebugMarkerMarkerInfoEXT & operator=( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native struct (layout-compatible, see the converting constructor above)
    DebugMarkerMarkerInfoEXT & operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DebugMarkerMarkerInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable member setters
    VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setPMarkerName( const char * pMarkerName_ ) VULKAN_HPP_NOEXCEPT
    {
      pMarkerName = pMarkerName_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setColor( std::array<float, 4> color_ ) VULKAN_HPP_NOEXCEPT
    {
      color = color_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // zero-cost conversions to the native struct (reference and pointer, const and non-const)
    operator VkDebugMarkerMarkerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( this );
    }

    operator VkDebugMarkerMarkerInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDebugMarkerMarkerInfoEXT *>( this );
    }

    operator VkDebugMarkerMarkerInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( this );
    }

    operator VkDebugMarkerMarkerInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDebugMarkerMarkerInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // returns a tuple of references to all members, in declaration order
    std::tuple<StructureType const &, const void * const &, const char * const &, ArrayWrapper1D<float, 4> const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pMarkerName, color );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // member-wise ordering (partial, because color holds floats); pMarkerName is compared by string content
    std::partial_ordering operator<=>( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( pMarkerName != rhs.pMarkerName )
      {
        // strcmp on a null pointer is undefined behavior; a null name orders before any non-null one
        if ( !pMarkerName || !rhs.pMarkerName )
          return !pMarkerName ? std::partial_ordering::less : std::partial_ordering::greater;
        if ( auto cmp = strcmp( pMarkerName, rhs.pMarkerName ); cmp != 0 )
          return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater;
      }
      if ( auto cmp = color <=> rhs.color; cmp != 0 )
        return cmp;

      return std::partial_ordering::equivalent;
    }
#endif

    // member-wise equality; pMarkerName compares equal if both pointers are identical (including both null)
    // or both are non-null and the strings match
    bool operator==( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( ( pMarkerName == rhs.pMarkerName ) || ( pMarkerName && rhs.pMarkerName && ( strcmp( pMarkerName, rhs.pMarkerName ) == 0 ) ) ) &&
             ( color == rhs.color );
    }

    bool operator!=( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    StructureType            sType       = StructureType::eDebugMarkerMarkerInfoEXT;
    const void *             pNext       = {};
    const char *             pMarkerName = {};
    ArrayWrapper1D<float, 4> color       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native Vulkan struct to its C++ wrapper type for template-based lookups (C++20 and up only)
  template <>
  struct CppType<VkDebugMarkerMarkerInfoEXT>
  {
    using Type = DebugMarkerMarkerInfoEXT;
  };
#endif

  // maps the StructureType enumerant back to the wrapper type carrying that sType
  template <>
  struct CppType<StructureType, StructureType::eDebugMarkerMarkerInfoEXT>
  {
    using Type = DebugMarkerMarkerInfoEXT;
  };

  // wrapper struct for struct VkDebugMarkerObjectNameInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugMarkerObjectNameInfoEXT.html
  struct DebugMarkerObjectNameInfoEXT
  {
    using NativeType = VkDebugMarkerObjectNameInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDebugMarkerObjectNameInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // element-wise constructor; pObjectName defaults to nullptr
    VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( DebugReportObjectTypeEXT objectType_  = DebugReportObjectTypeEXT::eUnknown,
                                                       uint64_t                 object_      = {},
                                                       const char *             pObjectName_ = {},
                                                       const void *             pNext_       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , objectType{ objectType_ }
      , object{ object_ }
      , pObjectName{ pObjectName_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native struct; valid because the wrapper is layout-compatible with the native type
    DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DebugMarkerObjectNameInfoEXT( *reinterpret_cast<DebugMarkerObjectNameInfoEXT const *>( &rhs ) )
    {
    }

    DebugMarkerObjectNameInfoEXT & operator=( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native struct (layout-compatible, see the converting constructor above)
    DebugMarkerObjectNameInfoEXT & operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DebugMarkerObjectNameInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable member setters
    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setObjectType( DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
    {
      objectType = objectType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT
    {
      object = object_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT
    {
      pObjectName = pObjectName_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // zero-cost conversions to the native struct (reference and pointer, const and non-const)
    operator VkDebugMarkerObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( this );
    }

    operator VkDebugMarkerObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDebugMarkerObjectNameInfoEXT *>( this );
    }

    operator VkDebugMarkerObjectNameInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( this );
    }

    operator VkDebugMarkerObjectNameInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDebugMarkerObjectNameInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // returns a tuple of references to all members, in declaration order
    std::tuple<StructureType const &, const void * const &, DebugReportObjectTypeEXT const &, uint64_t const &, const char * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, objectType, object, pObjectName );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // member-wise ordering; pObjectName is compared by string content, not by pointer
    std::strong_ordering operator<=>( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = objectType <=> rhs.objectType; cmp != 0 )
        return cmp;
      if ( auto cmp = object <=> rhs.object; cmp != 0 )
        return cmp;
      if ( pObjectName != rhs.pObjectName )
      {
        // strcmp on a null pointer is undefined behavior; a null name orders before any non-null one
        if ( !pObjectName || !rhs.pObjectName )
          return !pObjectName ? std::strong_ordering::less : std::strong_ordering::greater;
        if ( auto cmp = strcmp( pObjectName, rhs.pObjectName ); cmp != 0 )
          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      }

      return std::strong_ordering::equivalent;
    }
#endif

    // member-wise equality; pObjectName compares equal if both pointers are identical (including both null)
    // or both are non-null and the strings match
    bool operator==( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( object == rhs.object ) &&
             ( ( pObjectName == rhs.pObjectName ) || ( pObjectName && rhs.pObjectName && ( strcmp( pObjectName, rhs.pObjectName ) == 0 ) ) );
    }

    bool operator!=( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    StructureType            sType       = StructureType::eDebugMarkerObjectNameInfoEXT;
    const void *             pNext       = {};
    DebugReportObjectTypeEXT objectType  = DebugReportObjectTypeEXT::eUnknown;
    uint64_t                 object      = {};
    const char *             pObjectName = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native Vulkan struct to its C++ wrapper type for template-based lookups (C++20 and up only)
  template <>
  struct CppType<VkDebugMarkerObjectNameInfoEXT>
  {
    using Type = DebugMarkerObjectNameInfoEXT;
  };
#endif

  // maps the StructureType enumerant back to the wrapper type carrying that sType
  template <>
  struct CppType<StructureType, StructureType::eDebugMarkerObjectNameInfoEXT>
  {
    using Type = DebugMarkerObjectNameInfoEXT;
  };

  // wrapper struct for struct VkDebugMarkerObjectTagInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugMarkerObjectTagInfoEXT.html
  struct DebugMarkerObjectTagInfoEXT
  {
    using NativeType = VkDebugMarkerObjectTagInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDebugMarkerObjectTagInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown,
                                                      uint64_t                 object_     = {},
                                                      uint64_t                 tagName_    = {},
                                                      size_t                   tagSize_    = {},
                                                      const void *             pTag_       = {},
                                                      const void *             pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , objectType{ objectType_ }
      , object{ object_ }
      , tagName{ tagName_ }
      , tagSize{ tagSize_ }
      , pTag{ pTag_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DebugMarkerObjectTagInfoEXT( *reinterpret_cast<DebugMarkerObjectTagInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    template <typename T>
    DebugMarkerObjectTagInfoEXT(
      DebugReportObjectTypeEXT objectType_, uint64_t object_, uint64_t tagName_, ArrayProxyNoTemporaries<const T> const & tag_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof( T ) ), pTag( tag_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DebugMarkerObjectTagInfoEXT & operator=( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    DebugMarkerObjectTagInfoEXT & operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DebugMarkerObjectTagInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setObjectType( DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
    {
      objectType = objectType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT
    {
      object = object_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
    {
      tagName = tagName_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
    {
      tagSize = tagSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT
    {
      pTag = pTag_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets tagSize and pTag together from an array proxy; stores only the pointer and byte size,
    // so the proxy's underlying storage must outlive this struct.
    template <typename T>
    DebugMarkerObjectTagInfoEXT & setTag( ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
    {
      tagSize = tag_.size() * sizeof( T );
      pTag    = tag_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the equivalent C struct (by reference and by pointer), so this
    // wrapper can be handed directly to the C API; they rely on layout compatibility.
    operator VkDebugMarkerObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( this );
    }

    operator VkDebugMarkerObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDebugMarkerObjectTagInfoEXT *>( this );
    }

    operator VkDebugMarkerObjectTagInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( this );
    }

    operator VkDebugMarkerObjectTagInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDebugMarkerObjectTagInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: returns a tuple of references to all members, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               DebugReportObjectTypeEXT const &,
               uint64_t const &,
               uint64_t const &,
               size_t const &,
               const void * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, objectType, object, tagName, tagSize, pTag );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DebugMarkerObjectTagInfoEXT const & ) const = default;
#else
    // Member-wise equality; note that pNext and pTag are compared by pointer value,
    // not by the data they point to.
    bool operator==( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( object == rhs.object ) && ( tagName == rhs.tagName ) &&
             ( tagSize == rhs.tagSize ) && ( pTag == rhs.pTag );
#  endif
    }

    bool operator!=( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkDebugMarkerObjectTagInfoEXT, in declaration order; sType must stay first.
    StructureType            sType      = StructureType::eDebugMarkerObjectTagInfoEXT;
    const void *             pNext      = {};
    DebugReportObjectTypeEXT objectType = DebugReportObjectTypeEXT::eUnknown;
    uint64_t                 object     = {};
    uint64_t                 tagName    = {};
    size_t                   tagSize    = {};
    const void *             pTag       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkDebugMarkerObjectTagInfoEXT>
  {
    using Type = DebugMarkerObjectTagInfoEXT;
  };
#endif

  // Maps the sType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDebugMarkerObjectTagInfoEXT>
  {
    using Type = DebugMarkerObjectTagInfoEXT;
  };

  typedef Bool32( VKAPI_PTR * PFN_DebugReportCallbackEXT )( DebugReportFlagsEXT      flags,
                                                            DebugReportObjectTypeEXT objectType,
                                                            uint64_t                 object,
                                                            size_t                   location,
                                                            int32_t                  messageCode,
                                                            const char *             pLayerPrefix,
                                                            const char *             pMessage,
                                                            void *                   pUserData );

  // wrapper struct for struct VkDebugReportCallbackCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugReportCallbackCreateInfoEXT.html
  struct DebugReportCallbackCreateInfoEXT
  {
    using NativeType = VkDebugReportCallbackCreateInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDebugReportCallbackCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( DebugReportFlagsEXT        flags_       = {},
                                                           PFN_DebugReportCallbackEXT pfnCallback_ = {},
                                                           void *                     pUserData_   = {},
                                                           const void *               pNext_       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , pfnCallback{ pfnCallback_ }
      , pUserData{ pUserData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on this wrapper being layout-compatible with it.
    DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DebugReportCallbackCreateInfoEXT( *reinterpret_cast<DebugReportCallbackCreateInfoEXT const *>( &rhs ) )
    {
    }

    // The pragmas silence -Wcast-function-type for the reinterpret_cast between the C and the
    // namespace-local callback function-pointer types in the deprecated constructor below.
#  if defined( __clang__ ) || defined( __GNUC__ )
#    pragma GCC diagnostic push
#    if defined( __clang__ )
#      pragma clang diagnostic ignored "-Wunknown-warning-option"
#    endif
#    pragma GCC diagnostic ignored "-Wcast-function-type"
#  endif
    VULKAN_HPP_DEPRECATED( "This constructor is deprecated. Use the one taking function pointer types from the vk-namespace instead." )

    // Deprecated: accepts the raw C callback type and forwards to the primary constructor.
    DebugReportCallbackCreateInfoEXT( DebugReportFlagsEXT          flags_,
                                      PFN_vkDebugReportCallbackEXT pfnCallback_,
                                      void *                       pUserData_ = {},
                                      const void *                 pNext_     = nullptr ) VULKAN_HPP_NOEXCEPT
      : DebugReportCallbackCreateInfoEXT( flags_, reinterpret_cast<PFN_DebugReportCallbackEXT>( pfnCallback_ ), pUserData_, pNext_ )
    {
    }
#  if defined( __clang__ ) || defined( __GNUC__ )
#    pragma GCC diagnostic pop
#  endif

    DebugReportCallbackCreateInfoEXT & operator=( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct; same layout-compatibility reliance as the C-struct constructor.
    DebugReportCallbackCreateInfoEXT & operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DebugReportCallbackCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setFlags( DebugReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPfnCallback( PFN_DebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT
    {
      pfnCallback = pfnCallback_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
    {
      pUserData = pUserData_;
      return *this;
    }

    // The pragmas silence -Wcast-function-type for the cast in the deprecated setter below.
#  if defined( __clang__ ) || defined( __GNUC__ )
#    pragma GCC diagnostic push
#    if defined( __clang__ )
#      pragma clang diagnostic ignored "-Wunknown-warning-option"
#    endif
#    pragma GCC diagnostic ignored "-Wcast-function-type"
#  endif
    VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." )

    // Deprecated: accepts the raw C callback type and forwards to the primary setter.
    DebugReportCallbackCreateInfoEXT & setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT
    {
      return setPfnCallback( reinterpret_cast<PFN_DebugReportCallbackEXT>( pfnCallback_ ) );
    }
#  if defined( __clang__ ) || defined( __GNUC__ )
#    pragma GCC diagnostic pop
#  endif
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the equivalent C struct (by reference and by pointer); rely on layout compatibility.
    operator VkDebugReportCallbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( this );
    }

    operator VkDebugReportCallbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDebugReportCallbackCreateInfoEXT *>( this );
    }

    operator VkDebugReportCallbackCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( this );
    }

    operator VkDebugReportCallbackCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDebugReportCallbackCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: returns a tuple of references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, DebugReportFlagsEXT const &, PFN_DebugReportCallbackEXT const &, void * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, pfnCallback, pUserData );
    }
#endif

    // Member-wise equality; pNext, pfnCallback and pUserData are compared by pointer value.
    bool operator==( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pfnCallback == rhs.pfnCallback ) && ( pUserData == rhs.pUserData );
#endif
    }

    bool operator!=( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Members mirror VkDebugReportCallbackCreateInfoEXT, in declaration order; sType must stay first.
    StructureType              sType       = StructureType::eDebugReportCallbackCreateInfoEXT;
    const void *               pNext       = {};
    DebugReportFlagsEXT        flags       = {};
    PFN_DebugReportCallbackEXT pfnCallback = {};
    void *                     pUserData   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkDebugReportCallbackCreateInfoEXT>
  {
    using Type = DebugReportCallbackCreateInfoEXT;
  };
#endif

  // Maps the sType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDebugReportCallbackCreateInfoEXT>
  {
    using Type = DebugReportCallbackCreateInfoEXT;
  };

  // wrapper struct for struct VkDebugUtilsLabelEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsLabelEXT.html
  struct DebugUtilsLabelEXT
  {
    using NativeType = VkDebugUtilsLabelEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDebugUtilsLabelEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer. Note that pLabelName
    // defaults to nullptr.
    VULKAN_HPP_CONSTEXPR_14
      DebugUtilsLabelEXT( const char * pLabelName_ = {}, std::array<float, 4> const & color_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pLabelName{ pLabelName_ }
      , color{ color_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on this wrapper being layout-compatible with it.
    DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DebugUtilsLabelEXT( *reinterpret_cast<DebugUtilsLabelEXT const *>( &rhs ) ) {}

    DebugUtilsLabelEXT & operator=( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct; same layout-compatibility reliance as the C-struct constructor.
    DebugUtilsLabelEXT & operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DebugUtilsLabelEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setPLabelName( const char * pLabelName_ ) VULKAN_HPP_NOEXCEPT
    {
      pLabelName = pLabelName_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setColor( std::array<float, 4> color_ ) VULKAN_HPP_NOEXCEPT
    {
      color = color_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the equivalent C struct (by reference and by pointer); rely on layout compatibility.
    operator VkDebugUtilsLabelEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDebugUtilsLabelEXT *>( this );
    }

    operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDebugUtilsLabelEXT *>( this );
    }

    operator VkDebugUtilsLabelEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDebugUtilsLabelEXT *>( this );
    }

    operator VkDebugUtilsLabelEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDebugUtilsLabelEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: returns a tuple of references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, const char * const &, ArrayWrapper1D<float, 4> const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pLabelName, color );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Partial ordering: pLabelName is compared by string content (not by pointer).
    // A null pLabelName orders before any non-null one; strcmp is only called with two
    // non-null pointers, since strcmp on a null pointer is undefined behavior.
    std::partial_ordering operator<=>( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( pLabelName != rhs.pLabelName )
      {
        if ( !pLabelName || !rhs.pLabelName )
          return !pLabelName ? std::partial_ordering::less : std::partial_ordering::greater;
        if ( auto cmp = strcmp( pLabelName, rhs.pLabelName ); cmp != 0 )
          return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater;
      }
      if ( auto cmp = color <=> rhs.color; cmp != 0 )
        return cmp;

      return std::partial_ordering::equivalent;
    }
#endif

    // Equality: pLabelName compares equal when both pointers are identical (including both null)
    // or both are non-null and the strings match; strcmp is never called with a null pointer.
    bool operator==( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( ( pLabelName == rhs.pLabelName ) || ( pLabelName && rhs.pLabelName && ( strcmp( pLabelName, rhs.pLabelName ) == 0 ) ) ) &&
             ( color == rhs.color );
    }

    bool operator!=( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Members mirror VkDebugUtilsLabelEXT, in declaration order; sType must stay first.
    StructureType            sType      = StructureType::eDebugUtilsLabelEXT;
    const void *             pNext      = {};
    const char *             pLabelName = {};
    ArrayWrapper1D<float, 4> color      = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkDebugUtilsLabelEXT>
  {
    using Type = DebugUtilsLabelEXT;
  };
#endif

  // Maps the sType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDebugUtilsLabelEXT>
  {
    using Type = DebugUtilsLabelEXT;
  };

  // wrapper struct for struct VkDebugUtilsObjectNameInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsObjectNameInfoEXT.html
  struct DebugUtilsObjectNameInfoEXT
  {
    using NativeType = VkDebugUtilsObjectNameInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDebugUtilsObjectNameInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer. Note that pObjectName
    // defaults to nullptr.
    VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( ObjectType   objectType_   = ObjectType::eUnknown,
                                                      uint64_t     objectHandle_ = {},
                                                      const char * pObjectName_  = {},
                                                      const void * pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , objectType{ objectType_ }
      , objectHandle{ objectHandle_ }
      , pObjectName{ pObjectName_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on this wrapper being layout-compatible with it.
    DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DebugUtilsObjectNameInfoEXT( *reinterpret_cast<DebugUtilsObjectNameInfoEXT const *>( &rhs ) )
    {
    }

    DebugUtilsObjectNameInfoEXT & operator=( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct; same layout-compatibility reliance as the C-struct constructor.
    DebugUtilsObjectNameInfoEXT & operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DebugUtilsObjectNameInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setObjectType( ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
    {
      objectType = objectType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
    {
      objectHandle = objectHandle_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT
    {
      pObjectName = pObjectName_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the equivalent C struct (by reference and by pointer); rely on layout compatibility.
    operator VkDebugUtilsObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( this );
    }

    operator VkDebugUtilsObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDebugUtilsObjectNameInfoEXT *>( this );
    }

    operator VkDebugUtilsObjectNameInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( this );
    }

    operator VkDebugUtilsObjectNameInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDebugUtilsObjectNameInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: returns a tuple of references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, ObjectType const &, uint64_t const &, const char * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, objectType, objectHandle, pObjectName );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Strong ordering: pObjectName is compared by string content (not by pointer).
    // A null pObjectName orders before any non-null one; strcmp is only called with two
    // non-null pointers, since strcmp on a null pointer is undefined behavior.
    std::strong_ordering operator<=>( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = objectType <=> rhs.objectType; cmp != 0 )
        return cmp;
      if ( auto cmp = objectHandle <=> rhs.objectHandle; cmp != 0 )
        return cmp;
      if ( pObjectName != rhs.pObjectName )
      {
        if ( !pObjectName || !rhs.pObjectName )
          return !pObjectName ? std::strong_ordering::less : std::strong_ordering::greater;
        if ( auto cmp = strcmp( pObjectName, rhs.pObjectName ); cmp != 0 )
          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      }

      return std::strong_ordering::equivalent;
    }
#endif

    // Equality: pObjectName compares equal when both pointers are identical (including both null)
    // or both are non-null and the strings match; strcmp is never called with a null pointer.
    bool operator==( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) &&
             ( ( pObjectName == rhs.pObjectName ) || ( pObjectName && rhs.pObjectName && ( strcmp( pObjectName, rhs.pObjectName ) == 0 ) ) );
    }

    bool operator!=( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Members mirror VkDebugUtilsObjectNameInfoEXT, in declaration order; sType must stay first.
    StructureType sType        = StructureType::eDebugUtilsObjectNameInfoEXT;
    const void *  pNext        = {};
    ObjectType    objectType   = ObjectType::eUnknown;
    uint64_t      objectHandle = {};
    const char *  pObjectName  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkDebugUtilsObjectNameInfoEXT>
  {
    using Type = DebugUtilsObjectNameInfoEXT;
  };
#endif

  // Maps the sType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDebugUtilsObjectNameInfoEXT>
  {
    using Type = DebugUtilsObjectNameInfoEXT;
  };

  // wrapper struct for struct VkDebugUtilsMessengerCallbackDataEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsMessengerCallbackDataEXT.html
  struct DebugUtilsMessengerCallbackDataEXT
  {
    using NativeType = VkDebugUtilsMessengerCallbackDataEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDebugUtilsMessengerCallbackDataEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataFlagsEXT flags_            = {},
                                                                const char *                            pMessageIdName_   = {},
                                                                int32_t                                 messageIdNumber_  = {},
                                                                const char *                            pMessage_         = {},
                                                                uint32_t                                queueLabelCount_  = {},
                                                                const DebugUtilsLabelEXT *              pQueueLabels_     = {},
                                                                uint32_t                                cmdBufLabelCount_ = {},
                                                                const DebugUtilsLabelEXT *              pCmdBufLabels_    = {},
                                                                uint32_t                                objectCount_      = {},
                                                                const DebugUtilsObjectNameInfoEXT *     pObjects_         = {},
                                                                const void *                            pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , pMessageIdName{ pMessageIdName_ }
      , messageIdNumber{ messageIdNumber_ }
      , pMessage{ pMessage_ }
      , queueLabelCount{ queueLabelCount_ }
      , pQueueLabels{ pQueueLabels_ }
      , cmdBufLabelCount{ cmdBufLabelCount_ }
      , pCmdBufLabels{ pCmdBufLabels_ }
      , objectCount{ objectCount_ }
      , pObjects{ pObjects_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DebugUtilsMessengerCallbackDataEXT( *reinterpret_cast<DebugUtilsMessengerCallbackDataEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataFlagsEXT                            flags_,
                                        const char *                                                       pMessageIdName_,
                                        int32_t                                                            messageIdNumber_,
                                        const char *                                                       pMessage_,
                                        ArrayProxyNoTemporaries<const DebugUtilsLabelEXT> const &          queueLabels_,
                                        ArrayProxyNoTemporaries<const DebugUtilsLabelEXT> const &          cmdBufLabels_ = {},
                                        ArrayProxyNoTemporaries<const DebugUtilsObjectNameInfoEXT> const & objects_      = {},
                                        const void *                                                       pNext_        = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , pMessageIdName( pMessageIdName_ )
      , messageIdNumber( messageIdNumber_ )
      , pMessage( pMessage_ )
      , queueLabelCount( static_cast<uint32_t>( queueLabels_.size() ) )
      , pQueueLabels( queueLabels_.data() )
      , cmdBufLabelCount( static_cast<uint32_t>( cmdBufLabels_.size() ) )
      , pCmdBufLabels( cmdBufLabels_.data() )
      , objectCount( static_cast<uint32_t>( objects_.size() ) )
      , pObjects( objects_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DebugUtilsMessengerCallbackDataEXT & operator=( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    DebugUtilsMessengerCallbackDataEXT & operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DebugUtilsMessengerCallbackDataEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setFlags( DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPMessageIdName( const char * pMessageIdName_ ) VULKAN_HPP_NOEXCEPT
    {
      pMessageIdName = pMessageIdName_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setMessageIdNumber( int32_t messageIdNumber_ ) VULKAN_HPP_NOEXCEPT
    {
      messageIdNumber = messageIdNumber_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPMessage( const char * pMessage_ ) VULKAN_HPP_NOEXCEPT
    {
      pMessage = pMessage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setQueueLabelCount( uint32_t queueLabelCount_ ) VULKAN_HPP_NOEXCEPT
    {
      queueLabelCount = queueLabelCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPQueueLabels( const DebugUtilsLabelEXT * pQueueLabels_ ) VULKAN_HPP_NOEXCEPT
    {
      pQueueLabels = pQueueLabels_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DebugUtilsMessengerCallbackDataEXT & setQueueLabels( ArrayProxyNoTemporaries<const DebugUtilsLabelEXT> const & queueLabels_ ) VULKAN_HPP_NOEXCEPT
    {
      queueLabelCount = static_cast<uint32_t>( queueLabels_.size() );
      pQueueLabels    = queueLabels_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) VULKAN_HPP_NOEXCEPT
    {
      cmdBufLabelCount = cmdBufLabelCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPCmdBufLabels( const DebugUtilsLabelEXT * pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
    {
      pCmdBufLabels = pCmdBufLabels_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DebugUtilsMessengerCallbackDataEXT & setCmdBufLabels( ArrayProxyNoTemporaries<const DebugUtilsLabelEXT> const & cmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
    {
      cmdBufLabelCount = static_cast<uint32_t>( cmdBufLabels_.size() );
      pCmdBufLabels    = cmdBufLabels_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT
    {
      objectCount = objectCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPObjects( const DebugUtilsObjectNameInfoEXT * pObjects_ ) VULKAN_HPP_NOEXCEPT
    {
      pObjects = pObjects_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DebugUtilsMessengerCallbackDataEXT & setObjects( ArrayProxyNoTemporaries<const DebugUtilsObjectNameInfoEXT> const & objects_ ) VULKAN_HPP_NOEXCEPT
    {
      objectCount = static_cast<uint32_t>( objects_.size() );
      pObjects    = objects_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    operator VkDebugUtilsMessengerCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( this );
    }

    operator VkDebugUtilsMessengerCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDebugUtilsMessengerCallbackDataEXT *>( this );
    }

    operator VkDebugUtilsMessengerCallbackDataEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( this );
    }

    operator VkDebugUtilsMessengerCallbackDataEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDebugUtilsMessengerCallbackDataEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &,
               const void * const &,
               DebugUtilsMessengerCallbackDataFlagsEXT const &,
               const char * const &,
               int32_t const &,
               const char * const &,
               uint32_t const &,
               const DebugUtilsLabelEXT * const &,
               uint32_t const &,
               const DebugUtilsLabelEXT * const &,
               uint32_t const &,
               const DebugUtilsObjectNameInfoEXT * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(
        sType, pNext, flags, pMessageIdName, messageIdNumber, pMessage, queueLabelCount, pQueueLabels, cmdBufLabelCount, pCmdBufLabels, objectCount, pObjects );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Member-wise three-way comparison. The two C-string members (pMessageIdName, pMessage) are
    // compared by content via strcmp rather than by pointer value. Per the Vulkan spec both
    // strings may be NULL, so a NULL pointer is explicitly ordered before any non-NULL string;
    // this avoids passing NULL to strcmp, which is undefined behavior.
    std::strong_ordering operator<=>( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
        return cmp;
      if ( pMessageIdName != rhs.pMessageIdName )
      {
        // Only call strcmp when both sides are non-null; a lone NULL orders as "less".
        if ( !pMessageIdName || !rhs.pMessageIdName )
          return pMessageIdName ? std::strong_ordering::greater : std::strong_ordering::less;
        if ( auto cmp = strcmp( pMessageIdName, rhs.pMessageIdName ); cmp != 0 )
          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      }
      if ( auto cmp = messageIdNumber <=> rhs.messageIdNumber; cmp != 0 )
        return cmp;
      if ( pMessage != rhs.pMessage )
      {
        // Same NULL-safe content comparison as for pMessageIdName.
        if ( !pMessage || !rhs.pMessage )
          return pMessage ? std::strong_ordering::greater : std::strong_ordering::less;
        if ( auto cmp = strcmp( pMessage, rhs.pMessage ); cmp != 0 )
          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      }
      if ( auto cmp = queueLabelCount <=> rhs.queueLabelCount; cmp != 0 )
        return cmp;
      if ( auto cmp = pQueueLabels <=> rhs.pQueueLabels; cmp != 0 )
        return cmp;
      if ( auto cmp = cmdBufLabelCount <=> rhs.cmdBufLabelCount; cmp != 0 )
        return cmp;
      if ( auto cmp = pCmdBufLabels <=> rhs.pCmdBufLabels; cmp != 0 )
        return cmp;
      if ( auto cmp = objectCount <=> rhs.objectCount; cmp != 0 )
        return cmp;
      if ( auto cmp = pObjects <=> rhs.pObjects; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    // Member-wise equality. pMessageIdName and pMessage are compared by content: equal
    // pointers (including both NULL) short-circuit, and strcmp is only invoked when both
    // pointers are non-null — the Vulkan spec allows these strings to be NULL, and calling
    // strcmp with NULL is undefined behavior.
    bool operator==( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( ( pMessageIdName == rhs.pMessageIdName ) ||
               ( pMessageIdName && rhs.pMessageIdName && ( strcmp( pMessageIdName, rhs.pMessageIdName ) == 0 ) ) ) &&
             ( messageIdNumber == rhs.messageIdNumber ) &&
             ( ( pMessage == rhs.pMessage ) || ( pMessage && rhs.pMessage && ( strcmp( pMessage, rhs.pMessage ) == 0 ) ) ) &&
             ( queueLabelCount == rhs.queueLabelCount ) && ( pQueueLabels == rhs.pQueueLabels ) && ( cmdBufLabelCount == rhs.cmdBufLabelCount ) &&
             ( pCmdBufLabels == rhs.pCmdBufLabels ) && ( objectCount == rhs.objectCount ) && ( pObjects == rhs.pObjects );
    }

    // Negation of the member-wise equality above.
    bool operator!=( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }

  public:
    StructureType                           sType            = StructureType::eDebugUtilsMessengerCallbackDataEXT;
    const void *                            pNext            = {};
    DebugUtilsMessengerCallbackDataFlagsEXT flags            = {};
    const char *                            pMessageIdName   = {};
    int32_t                                 messageIdNumber  = {};
    const char *                            pMessage         = {};
    uint32_t                                queueLabelCount  = {};
    const DebugUtilsLabelEXT *              pQueueLabels     = {};
    uint32_t                                cmdBufLabelCount = {};
    const DebugUtilsLabelEXT *              pCmdBufLabels    = {};
    uint32_t                                objectCount      = {};
    const DebugUtilsObjectNameInfoEXT *     pObjects         = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait: maps the native C struct to its C++ wrapper type (available from C++20 on).
  template <>
  struct CppType<VkDebugUtilsMessengerCallbackDataEXT>
  {
    using Type = DebugUtilsMessengerCallbackDataEXT;
  };
#endif

  // Trait: maps the sType enumerant back to the wrapper struct carrying it.
  template <>
  struct CppType<StructureType, StructureType::eDebugUtilsMessengerCallbackDataEXT>
  {
    using Type = DebugUtilsMessengerCallbackDataEXT;
  };

  // C++-typed counterpart of PFN_vkDebugUtilsMessengerCallbackEXT; this is the callback
  // signature stored in DebugUtilsMessengerCreateInfoEXT::pfnUserCallback.
  typedef Bool32( VKAPI_PTR * PFN_DebugUtilsMessengerCallbackEXT )( DebugUtilsMessageSeverityFlagBitsEXT       messageSeverity,
                                                                    DebugUtilsMessageTypeFlagsEXT              messageTypes,
                                                                    const DebugUtilsMessengerCallbackDataEXT * pCallbackData,
                                                                    void *                                     pUserData );

  // wrapper struct for struct VkDebugUtilsMessengerCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsMessengerCreateInfoEXT.html
  struct DebugUtilsMessengerCreateInfoEXT
  {
    using NativeType = VkDebugUtilsMessengerCreateInfoEXT;

    static const bool                                  allowDuplicate = true;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDebugUtilsMessengerCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; all members are value-initialized by default.
    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateFlagsEXT  flags_           = {},
                                                           DebugUtilsMessageSeverityFlagsEXT  messageSeverity_ = {},
                                                           DebugUtilsMessageTypeFlagsEXT      messageType_     = {},
                                                           PFN_DebugUtilsMessengerCallbackEXT pfnUserCallback_ = {},
                                                           void *                             pUserData_       = {},
                                                           const void *                       pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , messageSeverity{ messageSeverity_ }
      , messageType{ messageType_ }
      , pfnUserCallback{ pfnUserCallback_ }
      , pUserData{ pUserData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on this wrapper being layout-compatible
    // with VkDebugUtilsMessengerCreateInfoEXT, so a reinterpret_cast suffices.
    DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DebugUtilsMessengerCreateInfoEXT( *reinterpret_cast<DebugUtilsMessengerCreateInfoEXT const *>( &rhs ) )
    {
    }

#  if defined( __clang__ ) || defined( __GNUC__ )
#    pragma GCC diagnostic push
#    if defined( __clang__ )
#      pragma clang diagnostic ignored "-Wunknown-warning-option"
#    endif
#    pragma GCC diagnostic ignored "-Wcast-function-type"
#  endif
    VULKAN_HPP_DEPRECATED( "This constructor is deprecated. Use the one taking function pointer types from the vk-namespace instead." )

    // Legacy overload taking the raw vk function-pointer type; forwards to the primary
    // constructor after a function-pointer cast (the warning for which is suppressed above).
    DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateFlagsEXT    flags_,
                                      DebugUtilsMessageSeverityFlagsEXT    messageSeverity_,
                                      DebugUtilsMessageTypeFlagsEXT        messageType_,
                                      PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_,
                                      void *                               pUserData_ = {},
                                      const void *                         pNext_     = nullptr ) VULKAN_HPP_NOEXCEPT
      : DebugUtilsMessengerCreateInfoEXT(
          flags_, messageSeverity_, messageType_, reinterpret_cast<PFN_DebugUtilsMessengerCallbackEXT>( pfnUserCallback_ ), pUserData_, pNext_ )
    {
    }
#  if defined( __clang__ ) || defined( __GNUC__ )
#    pragma GCC diagnostic pop
#  endif

    DebugUtilsMessengerCreateInfoEXT & operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-compatible reinterpretation).
    DebugUtilsMessengerCreateInfoEXT & operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DebugUtilsMessengerCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setFlags( DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setMessageSeverity( DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT
    {
      messageSeverity = messageSeverity_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setMessageType( DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT
    {
      messageType = messageType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPfnUserCallback( PFN_DebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
    {
      pfnUserCallback = pfnUserCallback_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
    {
      pUserData = pUserData_;
      return *this;
    }

#  if defined( __clang__ ) || defined( __GNUC__ )
#    pragma GCC diagnostic push
#    if defined( __clang__ )
#      pragma clang diagnostic ignored "-Wunknown-warning-option"
#    endif
#    pragma GCC diagnostic ignored "-Wcast-function-type"
#  endif
    VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." )

    // Legacy setter overload taking the raw vk function-pointer type; forwards after a cast.
    DebugUtilsMessengerCreateInfoEXT & setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
    {
      return setPfnUserCallback( reinterpret_cast<PFN_DebugUtilsMessengerCallbackEXT>( pfnUserCallback_ ) );
    }
#  if defined( __clang__ ) || defined( __GNUC__ )
#    pragma GCC diagnostic pop
#  endif
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native struct / pointer, relying on identical memory layout.
    operator VkDebugUtilsMessengerCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( this );
    }

    operator VkDebugUtilsMessengerCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDebugUtilsMessengerCreateInfoEXT *>( this );
    }

    operator VkDebugUtilsMessengerCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( this );
    }

    operator VkDebugUtilsMessengerCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDebugUtilsMessengerCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, used by the reflection-based operator== below.
    std::tuple<StructureType const &,
               const void * const &,
               DebugUtilsMessengerCreateFlagsEXT const &,
               DebugUtilsMessageSeverityFlagsEXT const &,
               DebugUtilsMessageTypeFlagsEXT const &,
               PFN_DebugUtilsMessengerCallbackEXT const &,
               void * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, messageSeverity, messageType, pfnUserCallback, pUserData );
    }
#endif

    // Member-wise equality (function pointers and pNext compared by address).
    bool operator==( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( messageSeverity == rhs.messageSeverity ) &&
             ( messageType == rhs.messageType ) && ( pfnUserCallback == rhs.pfnUserCallback ) && ( pUserData == rhs.pUserData );
#endif
    }

    bool operator!=( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    StructureType                      sType           = StructureType::eDebugUtilsMessengerCreateInfoEXT;
    const void *                       pNext           = {};
    DebugUtilsMessengerCreateFlagsEXT  flags           = {};
    DebugUtilsMessageSeverityFlagsEXT  messageSeverity = {};
    DebugUtilsMessageTypeFlagsEXT      messageType     = {};
    PFN_DebugUtilsMessengerCallbackEXT pfnUserCallback = {};
    void *                             pUserData       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait: maps the native C struct to its C++ wrapper type (available from C++20 on).
  template <>
  struct CppType<VkDebugUtilsMessengerCreateInfoEXT>
  {
    using Type = DebugUtilsMessengerCreateInfoEXT;
  };
#endif

  // Trait: maps the sType enumerant back to the wrapper struct carrying it.
  template <>
  struct CppType<StructureType, StructureType::eDebugUtilsMessengerCreateInfoEXT>
  {
    using Type = DebugUtilsMessengerCreateInfoEXT;
  };

  // wrapper struct for struct VkDebugUtilsObjectTagInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsObjectTagInfoEXT.html
  struct DebugUtilsObjectTagInfoEXT
  {
    using NativeType = VkDebugUtilsObjectTagInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDebugUtilsObjectTagInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; all members are value-initialized by default.
    VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( ObjectType   objectType_   = ObjectType::eUnknown,
                                                     uint64_t     objectHandle_ = {},
                                                     uint64_t     tagName_      = {},
                                                     size_t       tagSize_      = {},
                                                     const void * pTag_         = {},
                                                     const void * pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , objectType{ objectType_ }
      , objectHandle{ objectHandle_ }
      , tagName{ tagName_ }
      , tagSize{ tagSize_ }
      , pTag{ pTag_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on identical memory layout.
    DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DebugUtilsObjectTagInfoEXT( *reinterpret_cast<DebugUtilsObjectTagInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives tagSize (in bytes) and pTag from an array proxy.
    template <typename T>
    DebugUtilsObjectTagInfoEXT(
      ObjectType objectType_, uint64_t objectHandle_, uint64_t tagName_, ArrayProxyNoTemporaries<const T> const & tag_, const void * pNext_ = nullptr )
      : pNext( pNext_ )
      , objectType( objectType_ )
      , objectHandle( objectHandle_ )
      , tagName( tagName_ )
      , tagSize( tag_.size() * sizeof( T ) )
      , pTag( tag_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DebugUtilsObjectTagInfoEXT & operator=( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-compatible reinterpretation).
    DebugUtilsObjectTagInfoEXT & operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DebugUtilsObjectTagInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setObjectType( ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
    {
      objectType = objectType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
    {
      objectHandle = objectHandle_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
    {
      tagName = tagName_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
    {
      tagSize = tagSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT
    {
      pTag = pTag_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience setter: derives tagSize (in bytes) and pTag from an array proxy.
    template <typename T>
    DebugUtilsObjectTagInfoEXT & setTag( ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
    {
      tagSize = tag_.size() * sizeof( T );
      pTag    = tag_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native struct / pointer, relying on identical memory layout.
    operator VkDebugUtilsObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( this );
    }

    operator VkDebugUtilsObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDebugUtilsObjectTagInfoEXT *>( this );
    }

    operator VkDebugUtilsObjectTagInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( this );
    }

    operator VkDebugUtilsObjectTagInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDebugUtilsObjectTagInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, used by the reflection-based operator== below.
    std::tuple<StructureType const &, const void * const &, ObjectType const &, uint64_t const &, uint64_t const &, size_t const &, const void * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, objectType, objectHandle, tagName, tagSize, pTag );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted member-wise comparison (pTag compared by pointer, not content).
    auto operator<=>( DebugUtilsObjectTagInfoEXT const & ) const = default;
#else
    bool operator==( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) &&
             ( tagName == rhs.tagName ) && ( tagSize == rhs.tagSize ) && ( pTag == rhs.pTag );
#  endif
    }

    bool operator!=( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType        = StructureType::eDebugUtilsObjectTagInfoEXT;
    const void *  pNext        = {};
    ObjectType    objectType   = ObjectType::eUnknown;
    uint64_t      objectHandle = {};
    uint64_t      tagName      = {};
    size_t        tagSize      = {};
    const void *  pTag         = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait: maps the native C struct to its C++ wrapper type (available from C++20 on).
  template <>
  struct CppType<VkDebugUtilsObjectTagInfoEXT>
  {
    using Type = DebugUtilsObjectTagInfoEXT;
  };
#endif

  // Trait: maps the sType enumerant back to the wrapper struct carrying it.
  template <>
  struct CppType<StructureType, StructureType::eDebugUtilsObjectTagInfoEXT>
  {
    using Type = DebugUtilsObjectTagInfoEXT;
  };

  // wrapper struct for struct VkDecompressMemoryRegionEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDecompressMemoryRegionEXT.html
  struct DecompressMemoryRegionEXT
  {
    using NativeType = VkDecompressMemoryRegionEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; every field is value-initialized by default.
    VULKAN_HPP_CONSTEXPR DecompressMemoryRegionEXT( DeviceAddress srcAddress_       = {},
                                                    DeviceAddress dstAddress_       = {},
                                                    DeviceSize    compressedSize_   = {},
                                                    DeviceSize    decompressedSize_ = {} ) VULKAN_HPP_NOEXCEPT
      : srcAddress( srcAddress_ )
      , dstAddress( dstAddress_ )
      , compressedSize( compressedSize_ )
      , decompressedSize( decompressedSize_ )
    {
    }

    VULKAN_HPP_CONSTEXPR DecompressMemoryRegionEXT( DecompressMemoryRegionEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; both types share the same memory layout.
    DecompressMemoryRegionEXT( VkDecompressMemoryRegionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DecompressMemoryRegionEXT( *reinterpret_cast<DecompressMemoryRegionEXT const *>( &rhs ) )
    {
    }

    DecompressMemoryRegionEXT & operator=( DecompressMemoryRegionEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-compatible reinterpretation).
    DecompressMemoryRegionEXT & operator=( VkDecompressMemoryRegionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DecompressMemoryRegionEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each stores a single field and yields *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionEXT & setSrcAddress( DeviceAddress srcAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      this->srcAddress = srcAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionEXT & setDstAddress( DeviceAddress dstAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      this->dstAddress = dstAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionEXT & setCompressedSize( DeviceSize compressedSize_ ) VULKAN_HPP_NOEXCEPT
    {
      this->compressedSize = compressedSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionEXT & setDecompressedSize( DeviceSize decompressedSize_ ) VULKAN_HPP_NOEXCEPT
    {
      this->decompressedSize = decompressedSize_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native struct / pointer, relying on identical memory layout.
    operator VkDecompressMemoryRegionEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDecompressMemoryRegionEXT *>( this );
    }

    operator VkDecompressMemoryRegionEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDecompressMemoryRegionEXT *>( this );
    }

    operator VkDecompressMemoryRegionEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDecompressMemoryRegionEXT *>( this );
    }

    operator VkDecompressMemoryRegionEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDecompressMemoryRegionEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all fields, consumed by the reflection-based operator==.
    std::tuple<DeviceAddress const &, DeviceAddress const &, DeviceSize const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( srcAddress, dstAddress, compressedSize, decompressedSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted member-wise comparison.
    auto operator<=>( DecompressMemoryRegionEXT const & ) const = default;
#else
    bool operator==( DecompressMemoryRegionEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( srcAddress == rhs.srcAddress ) && ( dstAddress == rhs.dstAddress ) && ( compressedSize == rhs.compressedSize ) &&
             ( decompressedSize == rhs.decompressedSize );
#  endif
    }

    bool operator!=( DecompressMemoryRegionEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    DeviceAddress srcAddress       = {};
    DeviceAddress dstAddress       = {};
    DeviceSize    compressedSize   = {};
    DeviceSize    decompressedSize = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait: maps the native C struct to its C++ wrapper type (available from C++20 on).
  template <>
  struct CppType<VkDecompressMemoryRegionEXT>
  {
    using Type = DecompressMemoryRegionEXT;
  };
#endif

  // wrapper struct for struct VkDecompressMemoryInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDecompressMemoryInfoEXT.html
  struct DecompressMemoryInfoEXT
  {
    using NativeType = VkDecompressMemoryInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDecompressMemoryInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; all members are value-initialized by default.
    VULKAN_HPP_CONSTEXPR DecompressMemoryInfoEXT( MemoryDecompressionMethodFlagsEXT decompressionMethod_ = {},
                                                  uint32_t                          regionCount_         = {},
                                                  const DecompressMemoryRegionEXT * pRegions_            = {},
                                                  const void *                      pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , decompressionMethod{ decompressionMethod_ }
      , regionCount{ regionCount_ }
      , pRegions{ pRegions_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DecompressMemoryInfoEXT( DecompressMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on identical memory layout.
    DecompressMemoryInfoEXT( VkDecompressMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DecompressMemoryInfoEXT( *reinterpret_cast<DecompressMemoryInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives regionCount and pRegions from an array proxy.
    DecompressMemoryInfoEXT( MemoryDecompressionMethodFlagsEXT                                decompressionMethod_,
                             ArrayProxyNoTemporaries<const DecompressMemoryRegionEXT> const & regions_,
                             const void *                                                     pNext_ = nullptr )
      : pNext( pNext_ ), decompressionMethod( decompressionMethod_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DecompressMemoryInfoEXT & operator=( DecompressMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-compatible reinterpretation).
    DecompressMemoryInfoEXT & operator=( VkDecompressMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DecompressMemoryInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 DecompressMemoryInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DecompressMemoryInfoEXT & setDecompressionMethod( MemoryDecompressionMethodFlagsEXT decompressionMethod_ ) VULKAN_HPP_NOEXCEPT
    {
      decompressionMethod = decompressionMethod_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DecompressMemoryInfoEXT & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = regionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DecompressMemoryInfoEXT & setPRegions( const DecompressMemoryRegionEXT * pRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      pRegions = pRegions_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience setter: derives regionCount and pRegions from an array proxy.
    DecompressMemoryInfoEXT & setRegions( ArrayProxyNoTemporaries<const DecompressMemoryRegionEXT> const & regions_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = static_cast<uint32_t>( regions_.size() );
      pRegions    = regions_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native struct / pointer, relying on identical memory layout.
    operator VkDecompressMemoryInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDecompressMemoryInfoEXT *>( this );
    }

    operator VkDecompressMemoryInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDecompressMemoryInfoEXT *>( this );
    }

    operator VkDecompressMemoryInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDecompressMemoryInfoEXT *>( this );
    }

    operator VkDecompressMemoryInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDecompressMemoryInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, used by the reflection-based operator== below.
    std::
      tuple<StructureType const &, const void * const &, MemoryDecompressionMethodFlagsEXT const &, uint32_t const &, const DecompressMemoryRegionEXT * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, decompressionMethod, regionCount, pRegions );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted member-wise comparison (pRegions compared by pointer, not content).
    auto operator<=>( DecompressMemoryInfoEXT const & ) const = default;
#else
    bool operator==( DecompressMemoryInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decompressionMethod == rhs.decompressionMethod ) && ( regionCount == rhs.regionCount ) &&
             ( pRegions == rhs.pRegions );
#  endif
    }

    bool operator!=( DecompressMemoryInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                     sType               = StructureType::eDecompressMemoryInfoEXT;
    const void *                      pNext               = {};
    MemoryDecompressionMethodFlagsEXT decompressionMethod = {};
    uint32_t                          regionCount         = {};
    const DecompressMemoryRegionEXT * pRegions            = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait: maps the native C struct to its C++ wrapper type (available from C++20 on).
  template <>
  struct CppType<VkDecompressMemoryInfoEXT>
  {
    using Type = DecompressMemoryInfoEXT;
  };
#endif

  // Trait: maps the sType enumerant back to the wrapper struct carrying it.
  template <>
  struct CppType<StructureType, StructureType::eDecompressMemoryInfoEXT>
  {
    using Type = DecompressMemoryInfoEXT;
  };

  // wrapper struct for struct VkDecompressMemoryRegionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDecompressMemoryRegionNV.html
  struct DecompressMemoryRegionNV
  {
    using NativeType = VkDecompressMemoryRegionNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; all members are value-initialized by default.
    VULKAN_HPP_CONSTEXPR DecompressMemoryRegionNV( DeviceAddress                    srcAddress_          = {},
                                                   DeviceAddress                    dstAddress_          = {},
                                                   DeviceSize                       compressedSize_      = {},
                                                   DeviceSize                       decompressedSize_    = {},
                                                   MemoryDecompressionMethodFlagsNV decompressionMethod_ = {} ) VULKAN_HPP_NOEXCEPT
      : srcAddress{ srcAddress_ }
      , dstAddress{ dstAddress_ }
      , compressedSize{ compressedSize_ }
      , decompressedSize{ decompressedSize_ }
      , decompressionMethod{ decompressionMethod_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DecompressMemoryRegionNV( DecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on identical memory layout.
    DecompressMemoryRegionNV( VkDecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : DecompressMemoryRegionNV( *reinterpret_cast<DecompressMemoryRegionNV const *>( &rhs ) )
    {
    }

    DecompressMemoryRegionNV & operator=( DecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-compatible reinterpretation).
    DecompressMemoryRegionNV & operator=( VkDecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DecompressMemoryRegionNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setSrcAddress( DeviceAddress srcAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAddress = srcAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setDstAddress( DeviceAddress dstAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAddress = dstAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setCompressedSize( DeviceSize compressedSize_ ) VULKAN_HPP_NOEXCEPT
    {
      compressedSize = compressedSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setDecompressedSize( DeviceSize decompressedSize_ ) VULKAN_HPP_NOEXCEPT
    {
      decompressedSize = decompressedSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setDecompressionMethod( MemoryDecompressionMethodFlagsNV decompressionMethod_ ) VULKAN_HPP_NOEXCEPT
    {
      decompressionMethod = decompressionMethod_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native struct / pointer, relying on identical memory layout.
    operator VkDecompressMemoryRegionNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDecompressMemoryRegionNV *>( this );
    }

    operator VkDecompressMemoryRegionNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDecompressMemoryRegionNV *>( this );
    }

    operator VkDecompressMemoryRegionNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDecompressMemoryRegionNV *>( this );
    }

    operator VkDecompressMemoryRegionNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDecompressMemoryRegionNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, used by the reflection-based operator== below.
    std::tuple<DeviceAddress const &, DeviceAddress const &, DeviceSize const &, DeviceSize const &, MemoryDecompressionMethodFlagsNV const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( srcAddress, dstAddress, compressedSize, decompressedSize, decompressionMethod );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted member-wise comparison.
    auto operator<=>( DecompressMemoryRegionNV const & ) const = default;
#else
    bool operator==( DecompressMemoryRegionNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( srcAddress == rhs.srcAddress ) && ( dstAddress == rhs.dstAddress ) && ( compressedSize == rhs.compressedSize ) &&
             ( decompressedSize == rhs.decompressedSize ) && ( decompressionMethod == rhs.decompressionMethod );
#  endif
    }

    bool operator!=( DecompressMemoryRegionNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    DeviceAddress                    srcAddress          = {};
    DeviceAddress                    dstAddress          = {};
    DeviceSize                       compressedSize      = {};
    DeviceSize                       decompressedSize    = {};
    MemoryDecompressionMethodFlagsNV decompressionMethod = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native VkDecompressMemoryRegionNV type to its C++ wrapper for trait-based lookups
  template <>
  struct CppType<VkDecompressMemoryRegionNV>
  {
    using Type = DecompressMemoryRegionNV;
  };
#endif

  // wrapper struct for struct VkDedicatedAllocationBufferCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDedicatedAllocationBufferCreateInfoNV.html
  struct DedicatedAllocationBufferCreateInfoNV
  {
    // the native C struct this wrapper mirrors member-for-member (enables the reinterpret_cast interop below)
    using NativeType = VkDedicatedAllocationBufferCreateInfoNV;

    // allowDuplicate: whether this struct may appear more than once in a pNext chain;
    // structureType: the sType value this struct always carries
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDedicatedAllocationBufferCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed by its member initializer and not a parameter
    VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( Bool32 dedicatedAllocation_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , dedicatedAllocation{ dedicatedAllocation_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; relies on the wrapper being layout-compatible with it
    DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : DedicatedAllocationBufferCreateInfoNV( *reinterpret_cast<DedicatedAllocationBufferCreateInfoNV const *>( &rhs ) )
    {
    }

    DedicatedAllocationBufferCreateInfoNV & operator=( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct via the layout-compatible wrapper view
    DedicatedAllocationBufferCreateInfoNV & operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DedicatedAllocationBufferCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters (one per mutable member; sType has no setter by design)
    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV & setDedicatedAllocation( Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
    {
      dedicatedAllocation = dedicatedAllocation_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native C struct, by reference and by pointer
    operator VkDedicatedAllocationBufferCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDedicatedAllocationBufferCreateInfoNV *>( this );
    }

    operator VkDedicatedAllocationBufferCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV *>( this );
    }

    operator VkDedicatedAllocationBufferCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDedicatedAllocationBufferCreateInfoNV *>( this );
    }

    operator VkDedicatedAllocationBufferCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members, used for reflection-based comparison below
    std::tuple<StructureType const &, const void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, dedicatedAllocation );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DedicatedAllocationBufferCreateInfoNV const & ) const = default;
#else
    // memberwise equality (via reflect() when available, otherwise explicit member comparison)
    bool operator==( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation );
#  endif
    }

    bool operator!=( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members in the exact order of the native C struct
    StructureType sType               = StructureType::eDedicatedAllocationBufferCreateInfoNV;
    const void *  pNext               = {};
    Bool32        dedicatedAllocation = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native VkDedicatedAllocationBufferCreateInfoNV type to its C++ wrapper
  template <>
  struct CppType<VkDedicatedAllocationBufferCreateInfoNV>
  {
    using Type = DedicatedAllocationBufferCreateInfoNV;
  };
#endif

  // maps the eDedicatedAllocationBufferCreateInfoNV sType value to its C++ wrapper
  template <>
  struct CppType<StructureType, StructureType::eDedicatedAllocationBufferCreateInfoNV>
  {
    using Type = DedicatedAllocationBufferCreateInfoNV;
  };

  // wrapper struct for struct VkDedicatedAllocationImageCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDedicatedAllocationImageCreateInfoNV.html
  struct DedicatedAllocationImageCreateInfoNV
  {
    // the native C struct this wrapper mirrors member-for-member
    using NativeType = VkDedicatedAllocationImageCreateInfoNV;

    // allowDuplicate: whether this struct may appear more than once in a pNext chain;
    // structureType: the sType value this struct always carries
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDedicatedAllocationImageCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed by its member initializer and not a parameter
    VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( Bool32 dedicatedAllocation_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , dedicatedAllocation{ dedicatedAllocation_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; relies on the wrapper being layout-compatible with it
    DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : DedicatedAllocationImageCreateInfoNV( *reinterpret_cast<DedicatedAllocationImageCreateInfoNV const *>( &rhs ) )
    {
    }

    DedicatedAllocationImageCreateInfoNV & operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct via the layout-compatible wrapper view
    DedicatedAllocationImageCreateInfoNV & operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DedicatedAllocationImageCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters (one per mutable member; sType has no setter by design)
    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV & setDedicatedAllocation( Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
    {
      dedicatedAllocation = dedicatedAllocation_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native C struct, by reference and by pointer
    operator VkDedicatedAllocationImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDedicatedAllocationImageCreateInfoNV *>( this );
    }

    operator VkDedicatedAllocationImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV *>( this );
    }

    operator VkDedicatedAllocationImageCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDedicatedAllocationImageCreateInfoNV *>( this );
    }

    operator VkDedicatedAllocationImageCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members, used for reflection-based comparison below
    std::tuple<StructureType const &, const void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, dedicatedAllocation );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DedicatedAllocationImageCreateInfoNV const & ) const = default;
#else
    // memberwise equality (via reflect() when available, otherwise explicit member comparison)
    bool operator==( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation );
#  endif
    }

    bool operator!=( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members in the exact order of the native C struct
    StructureType sType               = StructureType::eDedicatedAllocationImageCreateInfoNV;
    const void *  pNext               = {};
    Bool32        dedicatedAllocation = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native VkDedicatedAllocationImageCreateInfoNV type to its C++ wrapper
  template <>
  struct CppType<VkDedicatedAllocationImageCreateInfoNV>
  {
    using Type = DedicatedAllocationImageCreateInfoNV;
  };
#endif

  // maps the eDedicatedAllocationImageCreateInfoNV sType value to its C++ wrapper
  template <>
  struct CppType<StructureType, StructureType::eDedicatedAllocationImageCreateInfoNV>
  {
    using Type = DedicatedAllocationImageCreateInfoNV;
  };

  // wrapper struct for struct VkDedicatedAllocationMemoryAllocateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDedicatedAllocationMemoryAllocateInfoNV.html
  struct DedicatedAllocationMemoryAllocateInfoNV
  {
    // the native C struct this wrapper mirrors member-for-member
    using NativeType = VkDedicatedAllocationMemoryAllocateInfoNV;

    // allowDuplicate: whether this struct may appear more than once in a pNext chain;
    // structureType: the sType value this struct always carries
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed by its member initializer and not a parameter
    VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( Image image_ = {}, Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , image{ image_ }
      , buffer{ buffer_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; relies on the wrapper being layout-compatible with it
    DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : DedicatedAllocationMemoryAllocateInfoNV( *reinterpret_cast<DedicatedAllocationMemoryAllocateInfoNV const *>( &rhs ) )
    {
    }

    DedicatedAllocationMemoryAllocateInfoNV & operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct via the layout-compatible wrapper view
    DedicatedAllocationMemoryAllocateInfoNV & operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DedicatedAllocationMemoryAllocateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters (one per mutable member; sType has no setter by design)
    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setImage( Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setBuffer( Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native C struct, by reference and by pointer
    operator VkDedicatedAllocationMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV *>( this );
    }

    operator VkDedicatedAllocationMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDedicatedAllocationMemoryAllocateInfoNV *>( this );
    }

    operator VkDedicatedAllocationMemoryAllocateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV *>( this );
    }

    operator VkDedicatedAllocationMemoryAllocateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDedicatedAllocationMemoryAllocateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members, used for reflection-based comparison below
    std::tuple<StructureType const &, const void * const &, Image const &, Buffer const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, image, buffer );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DedicatedAllocationMemoryAllocateInfoNV const & ) const = default;
#else
    // memberwise equality (via reflect() when available, otherwise explicit member comparison)
    bool operator==( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( buffer == rhs.buffer );
#  endif
    }

    bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members in the exact order of the native C struct
    StructureType sType  = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
    const void *  pNext  = {};
    Image         image  = {};
    Buffer        buffer = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native VkDedicatedAllocationMemoryAllocateInfoNV type to its C++ wrapper
  template <>
  struct CppType<VkDedicatedAllocationMemoryAllocateInfoNV>
  {
    using Type = DedicatedAllocationMemoryAllocateInfoNV;
  };
#endif

  // maps the eDedicatedAllocationMemoryAllocateInfoNV sType value to its C++ wrapper
  template <>
  struct CppType<StructureType, StructureType::eDedicatedAllocationMemoryAllocateInfoNV>
  {
    using Type = DedicatedAllocationMemoryAllocateInfoNV;
  };

  // wrapper struct for struct VkMemoryBarrier2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryBarrier2.html
  struct MemoryBarrier2
  {
    // the native C struct this wrapper mirrors member-for-member
    using NativeType = VkMemoryBarrier2;

    // allowDuplicate: whether this struct may appear more than once in a pNext chain;
    // structureType: the sType value this struct always carries
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryBarrier2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed by its member initializer and not a parameter
    VULKAN_HPP_CONSTEXPR MemoryBarrier2( PipelineStageFlags2 srcStageMask_  = {},
                                         AccessFlags2        srcAccessMask_ = {},
                                         PipelineStageFlags2 dstStageMask_  = {},
                                         AccessFlags2        dstAccessMask_ = {},
                                         const void *        pNext_         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcStageMask{ srcStageMask_ }
      , srcAccessMask{ srcAccessMask_ }
      , dstStageMask{ dstStageMask_ }
      , dstAccessMask{ dstAccessMask_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryBarrier2( MemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; relies on the wrapper being layout-compatible with it
    MemoryBarrier2( VkMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryBarrier2( *reinterpret_cast<MemoryBarrier2 const *>( &rhs ) ) {}

    MemoryBarrier2 & operator=( MemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct via the layout-compatible wrapper view
    MemoryBarrier2 & operator=( VkMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryBarrier2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters (one per mutable member; sType has no setter by design)
    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setSrcStageMask( PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcStageMask = srcStageMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setSrcAccessMask( AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAccessMask = srcAccessMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setDstStageMask( PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstStageMask = dstStageMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setDstAccessMask( AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAccessMask = dstAccessMask_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native C struct, by reference and by pointer
    operator VkMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryBarrier2 *>( this );
    }

    operator VkMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryBarrier2 *>( this );
    }

    operator VkMemoryBarrier2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryBarrier2 *>( this );
    }

    operator VkMemoryBarrier2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryBarrier2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members, used for reflection-based comparison below
    std::
      tuple<StructureType const &, const void * const &, PipelineStageFlags2 const &, AccessFlags2 const &, PipelineStageFlags2 const &, AccessFlags2 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryBarrier2 const & ) const = default;
#else
    // memberwise equality (via reflect() when available, otherwise explicit member comparison)
    bool operator==( MemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) &&
             ( dstStageMask == rhs.dstStageMask ) && ( dstAccessMask == rhs.dstAccessMask );
#  endif
    }

    bool operator!=( MemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members in the exact order of the native C struct
    StructureType       sType         = StructureType::eMemoryBarrier2;
    const void *        pNext         = {};
    PipelineStageFlags2 srcStageMask  = {};
    AccessFlags2        srcAccessMask = {};
    PipelineStageFlags2 dstStageMask  = {};
    AccessFlags2        dstAccessMask = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native VkMemoryBarrier2 type to its C++ wrapper
  template <>
  struct CppType<VkMemoryBarrier2>
  {
    using Type = MemoryBarrier2;
  };
#endif

  // maps the eMemoryBarrier2 sType value to its C++ wrapper
  template <>
  struct CppType<StructureType, StructureType::eMemoryBarrier2>
  {
    using Type = MemoryBarrier2;
  };

  // KHR alias kept for source compatibility with the VK_KHR_synchronization2 extension name
  using MemoryBarrier2KHR = MemoryBarrier2;

  // wrapper struct for struct VkImageSubresourceRange, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageSubresourceRange.html
  struct ImageSubresourceRange
  {
    // the native C struct this wrapper mirrors member-for-member (no sType/pNext: plain data struct)
    using NativeType = VkImageSubresourceRange;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor, one parameter per member, all value-initialized by default
    VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageAspectFlags aspectMask_     = {},
                                                uint32_t         baseMipLevel_   = {},
                                                uint32_t         levelCount_     = {},
                                                uint32_t         baseArrayLayer_ = {},
                                                uint32_t         layerCount_     = {} ) VULKAN_HPP_NOEXCEPT
      : aspectMask{ aspectMask_ }
      , baseMipLevel{ baseMipLevel_ }
      , levelCount{ levelCount_ }
      , baseArrayLayer{ baseArrayLayer_ }
      , layerCount{ layerCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; relies on the wrapper being layout-compatible with it
    ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageSubresourceRange( *reinterpret_cast<ImageSubresourceRange const *>( &rhs ) )
    {
    }

    ImageSubresourceRange & operator=( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct via the layout-compatible wrapper view
    ImageSubresourceRange & operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageSubresourceRange const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters, one per member
    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setAspectMask( ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
    {
      aspectMask = aspectMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT
    {
      baseMipLevel = baseMipLevel_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT
    {
      levelCount = levelCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
    {
      baseArrayLayer = baseArrayLayer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
    {
      layerCount = layerCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native C struct, by reference and by pointer
    operator VkImageSubresourceRange const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageSubresourceRange *>( this );
    }

    operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageSubresourceRange *>( this );
    }

    operator VkImageSubresourceRange const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageSubresourceRange *>( this );
    }

    operator VkImageSubresourceRange *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageSubresourceRange *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members, used for reflection-based comparison below
    std::tuple<ImageAspectFlags const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( aspectMask, baseMipLevel, levelCount, baseArrayLayer, layerCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageSubresourceRange const & ) const = default;
#else
    // memberwise equality (via reflect() when available, otherwise explicit member comparison)
    bool operator==( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( aspectMask == rhs.aspectMask ) && ( baseMipLevel == rhs.baseMipLevel ) && ( levelCount == rhs.levelCount ) &&
             ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount );
#  endif
    }

    bool operator!=( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members in the exact order of the native C struct
    ImageAspectFlags aspectMask     = {};
    uint32_t         baseMipLevel   = {};
    uint32_t         levelCount     = {};
    uint32_t         baseArrayLayer = {};
    uint32_t         layerCount     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native VkImageSubresourceRange type to its C++ wrapper
  template <>
  struct CppType<VkImageSubresourceRange>
  {
    using Type = ImageSubresourceRange;
  };
#endif

  // wrapper struct for struct VkImageMemoryBarrier2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageMemoryBarrier2.html
  struct ImageMemoryBarrier2
  {
    // the native C struct this wrapper mirrors member-for-member
    using NativeType = VkImageMemoryBarrier2;

    // allowDuplicate: whether this struct may appear more than once in a pNext chain;
    // structureType: the sType value this struct always carries
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageMemoryBarrier2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed by its member initializer and not a parameter
    VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2( PipelineStageFlags2   srcStageMask_        = {},
                                              AccessFlags2          srcAccessMask_       = {},
                                              PipelineStageFlags2   dstStageMask_        = {},
                                              AccessFlags2          dstAccessMask_       = {},
                                              ImageLayout           oldLayout_           = ImageLayout::eUndefined,
                                              ImageLayout           newLayout_           = ImageLayout::eUndefined,
                                              uint32_t              srcQueueFamilyIndex_ = {},
                                              uint32_t              dstQueueFamilyIndex_ = {},
                                              Image                 image_               = {},
                                              ImageSubresourceRange subresourceRange_    = {},
                                              const void *          pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcStageMask{ srcStageMask_ }
      , srcAccessMask{ srcAccessMask_ }
      , dstStageMask{ dstStageMask_ }
      , dstAccessMask{ dstAccessMask_ }
      , oldLayout{ oldLayout_ }
      , newLayout{ newLayout_ }
      , srcQueueFamilyIndex{ srcQueueFamilyIndex_ }
      , dstQueueFamilyIndex{ dstQueueFamilyIndex_ }
      , image{ image_ }
      , subresourceRange{ subresourceRange_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2( ImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; relies on the wrapper being layout-compatible with it
    ImageMemoryBarrier2( VkImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageMemoryBarrier2( *reinterpret_cast<ImageMemoryBarrier2 const *>( &rhs ) )
    {
    }

    ImageMemoryBarrier2 & operator=( ImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct via the layout-compatible wrapper view
    ImageMemoryBarrier2 & operator=( VkImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageMemoryBarrier2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters (one per mutable member; sType has no setter by design)
    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcStageMask( PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcStageMask = srcStageMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcAccessMask( AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAccessMask = srcAccessMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstStageMask( PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstStageMask = dstStageMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstAccessMask( AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAccessMask = dstAccessMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setOldLayout( ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      oldLayout = oldLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setNewLayout( ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      newLayout = newLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      srcQueueFamilyIndex = srcQueueFamilyIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      dstQueueFamilyIndex = dstQueueFamilyIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setImage( Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }

    // subresourceRange is taken by const & (aggregate member), unlike the by-value scalar setters above
    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSubresourceRange( ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
    {
      subresourceRange = subresourceRange_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native C struct, by reference and by pointer
    operator VkImageMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageMemoryBarrier2 *>( this );
    }

    operator VkImageMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageMemoryBarrier2 *>( this );
    }

    operator VkImageMemoryBarrier2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageMemoryBarrier2 *>( this );
    }

    operator VkImageMemoryBarrier2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageMemoryBarrier2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members, used for reflection-based comparison below
    std::tuple<StructureType const &,
               const void * const &,
               PipelineStageFlags2 const &,
               AccessFlags2 const &,
               PipelineStageFlags2 const &,
               AccessFlags2 const &,
               ImageLayout const &,
               ImageLayout const &,
               uint32_t const &,
               uint32_t const &,
               Image const &,
               ImageSubresourceRange const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       srcStageMask,
                       srcAccessMask,
                       dstStageMask,
                       dstAccessMask,
                       oldLayout,
                       newLayout,
                       srcQueueFamilyIndex,
                       dstQueueFamilyIndex,
                       image,
                       subresourceRange );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageMemoryBarrier2 const & ) const = default;
#else
    // memberwise equality (via reflect() when available, otherwise explicit member comparison)
    bool operator==( ImageMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) &&
             ( dstStageMask == rhs.dstStageMask ) && ( dstAccessMask == rhs.dstAccessMask ) && ( oldLayout == rhs.oldLayout ) &&
             ( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) &&
             ( image == rhs.image ) && ( subresourceRange == rhs.subresourceRange );
#  endif
    }

    bool operator!=( ImageMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members in the exact order of the native C struct
    StructureType         sType               = StructureType::eImageMemoryBarrier2;
    const void *          pNext               = {};
    PipelineStageFlags2   srcStageMask        = {};
    AccessFlags2          srcAccessMask       = {};
    PipelineStageFlags2   dstStageMask        = {};
    AccessFlags2          dstAccessMask       = {};
    ImageLayout           oldLayout           = ImageLayout::eUndefined;
    ImageLayout           newLayout           = ImageLayout::eUndefined;
    uint32_t              srcQueueFamilyIndex = {};
    uint32_t              dstQueueFamilyIndex = {};
    Image                 image               = {};
    ImageSubresourceRange subresourceRange    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native VkImageMemoryBarrier2 type to its C++ wrapper
  template <>
  struct CppType<VkImageMemoryBarrier2>
  {
    using Type = ImageMemoryBarrier2;
  };
#endif

  // maps the eImageMemoryBarrier2 sType value to its C++ wrapper
  template <>
  struct CppType<StructureType, StructureType::eImageMemoryBarrier2>
  {
    using Type = ImageMemoryBarrier2;
  };

  // KHR alias kept for source compatibility with the VK_KHR_synchronization2 extension name
  using ImageMemoryBarrier2KHR = ImageMemoryBarrier2;

  // wrapper struct for struct VkDependencyInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDependencyInfo.html
  struct DependencyInfo
  {
    using NativeType = VkDependencyInfo;

    // allowDuplicate == false: at most one instance of this struct is expected in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDependencyInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by the member initializer below and never set here.
    VULKAN_HPP_CONSTEXPR DependencyInfo( DependencyFlags              dependencyFlags_          = {},
                                         uint32_t                     memoryBarrierCount_       = {},
                                         const MemoryBarrier2 *       pMemoryBarriers_          = {},
                                         uint32_t                     bufferMemoryBarrierCount_ = {},
                                         const BufferMemoryBarrier2 * pBufferMemoryBarriers_    = {},
                                         uint32_t                     imageMemoryBarrierCount_  = {},
                                         const ImageMemoryBarrier2 *  pImageMemoryBarriers_     = {},
                                         const void *                 pNext_                    = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , dependencyFlags{ dependencyFlags_ }
      , memoryBarrierCount{ memoryBarrierCount_ }
      , pMemoryBarriers{ pMemoryBarriers_ }
      , bufferMemoryBarrierCount{ bufferMemoryBarrierCount_ }
      , pBufferMemoryBarriers{ pBufferMemoryBarriers_ }
      , imageMemoryBarrierCount{ imageMemoryBarrierCount_ }
      , pImageMemoryBarriers{ pImageMemoryBarriers_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DependencyInfo( DependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-identical to VkDependencyInfo.
    DependencyInfo( VkDependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DependencyInfo( *reinterpret_cast<DependencyInfo const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives each count/pointer pair from an ArrayProxy.
    // Only the raw data pointers are stored, so the proxied arrays must outlive any use of this struct.
    DependencyInfo( DependencyFlags                                             dependencyFlags_,
                    ArrayProxyNoTemporaries<const MemoryBarrier2> const &       memoryBarriers_,
                    ArrayProxyNoTemporaries<const BufferMemoryBarrier2> const & bufferMemoryBarriers_ = {},
                    ArrayProxyNoTemporaries<const ImageMemoryBarrier2> const &  imageMemoryBarriers_  = {},
                    const void *                                                pNext_                = nullptr )
      : pNext( pNext_ )
      , dependencyFlags( dependencyFlags_ )
      , memoryBarrierCount( static_cast<uint32_t>( memoryBarriers_.size() ) )
      , pMemoryBarriers( memoryBarriers_.data() )
      , bufferMemoryBarrierCount( static_cast<uint32_t>( bufferMemoryBarriers_.size() ) )
      , pBufferMemoryBarriers( bufferMemoryBarriers_.data() )
      , imageMemoryBarrierCount( static_cast<uint32_t>( imageMemoryBarriers_.size() ) )
      , pImageMemoryBarriers( imageMemoryBarriers_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DependencyInfo & operator=( DependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct through the layout-compatible wrapper view.
    DependencyInfo & operator=( VkDependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DependencyInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setDependencyFlags( DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      dependencyFlags = dependencyFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setMemoryBarrierCount( uint32_t memoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryBarrierCount = memoryBarrierCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPMemoryBarriers( const MemoryBarrier2 * pMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
    {
      pMemoryBarriers = pMemoryBarriers_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets count and pointer together from one proxy; the proxied array is borrowed, not copied.
    DependencyInfo & setMemoryBarriers( ArrayProxyNoTemporaries<const MemoryBarrier2> const & memoryBarriers_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryBarrierCount = static_cast<uint32_t>( memoryBarriers_.size() );
      pMemoryBarriers    = memoryBarriers_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setBufferMemoryBarrierCount( uint32_t bufferMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferMemoryBarrierCount = bufferMemoryBarrierCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPBufferMemoryBarriers( const BufferMemoryBarrier2 * pBufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
    {
      pBufferMemoryBarriers = pBufferMemoryBarriers_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets count and pointer together from one proxy; the proxied array is borrowed, not copied.
    DependencyInfo & setBufferMemoryBarriers( ArrayProxyNoTemporaries<const BufferMemoryBarrier2> const & bufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferMemoryBarrierCount = static_cast<uint32_t>( bufferMemoryBarriers_.size() );
      pBufferMemoryBarriers    = bufferMemoryBarriers_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setImageMemoryBarrierCount( uint32_t imageMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
    {
      imageMemoryBarrierCount = imageMemoryBarrierCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPImageMemoryBarriers( const ImageMemoryBarrier2 * pImageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
    {
      pImageMemoryBarriers = pImageMemoryBarriers_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets count and pointer together from one proxy; the proxied array is borrowed, not copied.
    DependencyInfo & setImageMemoryBarriers( ArrayProxyNoTemporaries<const ImageMemoryBarrier2> const & imageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
    {
      imageMemoryBarrierCount = static_cast<uint32_t>( imageMemoryBarriers_.size() );
      pImageMemoryBarriers    = imageMemoryBarriers_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type; these reinterpret this object in place,
    // relying on identical memory layout with VkDependencyInfo.
    operator VkDependencyInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDependencyInfo *>( this );
    }

    operator VkDependencyInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDependencyInfo *>( this );
    }

    operator VkDependencyInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDependencyInfo *>( this );
    }

    operator VkDependencyInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDependencyInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for generic comparison below.
    std::tuple<StructureType const &,
               const void * const &,
               DependencyFlags const &,
               uint32_t const &,
               const MemoryBarrier2 * const &,
               uint32_t const &,
               const BufferMemoryBarrier2 * const &,
               uint32_t const &,
               const ImageMemoryBarrier2 * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       dependencyFlags,
                       memoryBarrierCount,
                       pMemoryBarriers,
                       bufferMemoryBarrierCount,
                       pBufferMemoryBarriers,
                       imageMemoryBarrierCount,
                       pImageMemoryBarriers );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DependencyInfo const & ) const = default;
#else
    // Member-wise equality; note that the barrier array pointers are compared by
    // address only, not by the pointed-to contents.
    bool operator==( DependencyInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dependencyFlags == rhs.dependencyFlags ) &&
             ( memoryBarrierCount == rhs.memoryBarrierCount ) && ( pMemoryBarriers == rhs.pMemoryBarriers ) &&
             ( bufferMemoryBarrierCount == rhs.bufferMemoryBarrierCount ) && ( pBufferMemoryBarriers == rhs.pBufferMemoryBarriers ) &&
             ( imageMemoryBarrierCount == rhs.imageMemoryBarrierCount ) && ( pImageMemoryBarriers == rhs.pImageMemoryBarriers );
#  endif
    }

    bool operator!=( DependencyInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly (required for the reinterpret_cast conversions above).
    StructureType                sType                    = StructureType::eDependencyInfo;
    const void *                 pNext                    = {};
    DependencyFlags              dependencyFlags          = {};
    uint32_t                     memoryBarrierCount       = {};
    const MemoryBarrier2 *       pMemoryBarriers          = {};
    uint32_t                     bufferMemoryBarrierCount = {};
    const BufferMemoryBarrier2 * pBufferMemoryBarriers    = {};
    uint32_t                     imageMemoryBarrierCount  = {};
    const ImageMemoryBarrier2 *  pImageMemoryBarriers     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper for template code (C++20 and newer).
  template <>
  struct CppType<VkDependencyInfo>
  {
    using Type = DependencyInfo;
  };
#endif

  // Maps the eDependencyInfo sType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDependencyInfo>
  {
    using Type = DependencyInfo;
  };

  // KHR-suffixed alias kept for source compatibility with extension-era code.
  using DependencyInfoKHR = DependencyInfo;

  // wrapper struct for struct VkDepthBiasInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDepthBiasInfoEXT.html
  struct DepthBiasInfoEXT
  {
    using NativeType = VkDepthBiasInfoEXT;

    // allowDuplicate == false: at most one instance of this struct is expected in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDepthBiasInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by the member initializer below and never set here.
    VULKAN_HPP_CONSTEXPR DepthBiasInfoEXT( float        depthBiasConstantFactor_ = {},
                                           float        depthBiasClamp_          = {},
                                           float        depthBiasSlopeFactor_    = {},
                                           const void * pNext_                   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , depthBiasConstantFactor{ depthBiasConstantFactor_ }
      , depthBiasClamp{ depthBiasClamp_ }
      , depthBiasSlopeFactor{ depthBiasSlopeFactor_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DepthBiasInfoEXT( DepthBiasInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-identical to VkDepthBiasInfoEXT.
    DepthBiasInfoEXT( VkDepthBiasInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DepthBiasInfoEXT( *reinterpret_cast<DepthBiasInfoEXT const *>( &rhs ) ) {}

    DepthBiasInfoEXT & operator=( DepthBiasInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct through the layout-compatible wrapper view.
    DepthBiasInfoEXT & operator=( VkDepthBiasInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DepthBiasInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT & setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBiasConstantFactor = depthBiasConstantFactor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT & setDepthBiasClamp( float depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBiasClamp = depthBiasClamp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT & setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBiasSlopeFactor = depthBiasSlopeFactor_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type; these reinterpret this object in place,
    // relying on identical memory layout with VkDepthBiasInfoEXT.
    operator VkDepthBiasInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDepthBiasInfoEXT *>( this );
    }

    operator VkDepthBiasInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDepthBiasInfoEXT *>( this );
    }

    operator VkDepthBiasInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDepthBiasInfoEXT *>( this );
    }

    operator VkDepthBiasInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDepthBiasInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for generic comparison below.
    std::tuple<StructureType const &, const void * const &, float const &, float const &, float const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DepthBiasInfoEXT const & ) const = default;
#else
    // Member-wise equality; the float members are compared with exact ==.
    bool operator==( DepthBiasInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor ) &&
             ( depthBiasClamp == rhs.depthBiasClamp ) && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor );
#  endif
    }

    bool operator!=( DepthBiasInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly (required for the reinterpret_cast conversions above).
    StructureType sType                   = StructureType::eDepthBiasInfoEXT;
    const void *  pNext                   = {};
    float         depthBiasConstantFactor = {};
    float         depthBiasClamp          = {};
    float         depthBiasSlopeFactor    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper for template code (C++20 and newer).
  template <>
  struct CppType<VkDepthBiasInfoEXT>
  {
    using Type = DepthBiasInfoEXT;
  };
#endif

  // Maps the eDepthBiasInfoEXT sType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDepthBiasInfoEXT>
  {
    using Type = DepthBiasInfoEXT;
  };

  // wrapper struct for struct VkDepthBiasRepresentationInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDepthBiasRepresentationInfoEXT.html
  struct DepthBiasRepresentationInfoEXT
  {
    using NativeType = VkDepthBiasRepresentationInfoEXT;

    // allowDuplicate == false: at most one instance of this struct is expected in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDepthBiasRepresentationInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by the member initializer below and never set here.
    VULKAN_HPP_CONSTEXPR
      DepthBiasRepresentationInfoEXT( DepthBiasRepresentationEXT depthBiasRepresentation_ = DepthBiasRepresentationEXT::eLeastRepresentableValueFormat,
                                      Bool32                     depthBiasExact_          = {},
                                      const void *               pNext_                   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , depthBiasRepresentation{ depthBiasRepresentation_ }
      , depthBiasExact{ depthBiasExact_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DepthBiasRepresentationInfoEXT( DepthBiasRepresentationInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-identical to VkDepthBiasRepresentationInfoEXT.
    DepthBiasRepresentationInfoEXT( VkDepthBiasRepresentationInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DepthBiasRepresentationInfoEXT( *reinterpret_cast<DepthBiasRepresentationInfoEXT const *>( &rhs ) )
    {
    }

    DepthBiasRepresentationInfoEXT & operator=( DepthBiasRepresentationInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct through the layout-compatible wrapper view.
    DepthBiasRepresentationInfoEXT & operator=( VkDepthBiasRepresentationInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DepthBiasRepresentationInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 DepthBiasRepresentationInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DepthBiasRepresentationInfoEXT &
      setDepthBiasRepresentation( DepthBiasRepresentationEXT depthBiasRepresentation_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBiasRepresentation = depthBiasRepresentation_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DepthBiasRepresentationInfoEXT & setDepthBiasExact( Bool32 depthBiasExact_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBiasExact = depthBiasExact_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type; these reinterpret this object in place,
    // relying on identical memory layout with VkDepthBiasRepresentationInfoEXT.
    operator VkDepthBiasRepresentationInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDepthBiasRepresentationInfoEXT *>( this );
    }

    operator VkDepthBiasRepresentationInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDepthBiasRepresentationInfoEXT *>( this );
    }

    operator VkDepthBiasRepresentationInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDepthBiasRepresentationInfoEXT *>( this );
    }

    operator VkDepthBiasRepresentationInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDepthBiasRepresentationInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for generic comparison below.
    std::tuple<StructureType const &, const void * const &, DepthBiasRepresentationEXT const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, depthBiasRepresentation, depthBiasExact );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DepthBiasRepresentationInfoEXT const & ) const = default;
#else
    // Member-wise equality over all members, including sType and pNext.
    bool operator==( DepthBiasRepresentationInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthBiasRepresentation == rhs.depthBiasRepresentation ) &&
             ( depthBiasExact == rhs.depthBiasExact );
#  endif
    }

    bool operator!=( DepthBiasRepresentationInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly (required for the reinterpret_cast conversions above).
    StructureType              sType                   = StructureType::eDepthBiasRepresentationInfoEXT;
    const void *               pNext                   = {};
    DepthBiasRepresentationEXT depthBiasRepresentation = DepthBiasRepresentationEXT::eLeastRepresentableValueFormat;
    Bool32                     depthBiasExact          = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper for template code (C++20 and newer).
  template <>
  struct CppType<VkDepthBiasRepresentationInfoEXT>
  {
    using Type = DepthBiasRepresentationInfoEXT;
  };
#endif

  // Maps the eDepthBiasRepresentationInfoEXT sType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDepthBiasRepresentationInfoEXT>
  {
    using Type = DepthBiasRepresentationInfoEXT;
  };

  // wrapper struct for struct VkDepthClampRangeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDepthClampRangeEXT.html
  struct DepthClampRangeEXT
  {
    using NativeType = VkDepthClampRangeEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor. Note: this struct has no sType/pNext — it is a plain data struct.
    VULKAN_HPP_CONSTEXPR DepthClampRangeEXT( float minDepthClamp_ = {}, float maxDepthClamp_ = {} ) VULKAN_HPP_NOEXCEPT
      : minDepthClamp{ minDepthClamp_ }
      , maxDepthClamp{ maxDepthClamp_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DepthClampRangeEXT( DepthClampRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-identical to VkDepthClampRangeEXT.
    DepthClampRangeEXT( VkDepthClampRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DepthClampRangeEXT( *reinterpret_cast<DepthClampRangeEXT const *>( &rhs ) ) {}

    DepthClampRangeEXT & operator=( DepthClampRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct through the layout-compatible wrapper view.
    DepthClampRangeEXT & operator=( VkDepthClampRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DepthClampRangeEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 DepthClampRangeEXT & setMinDepthClamp( float minDepthClamp_ ) VULKAN_HPP_NOEXCEPT
    {
      minDepthClamp = minDepthClamp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DepthClampRangeEXT & setMaxDepthClamp( float maxDepthClamp_ ) VULKAN_HPP_NOEXCEPT
    {
      maxDepthClamp = maxDepthClamp_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type; these reinterpret this object in place,
    // relying on identical memory layout with VkDepthClampRangeEXT.
    operator VkDepthClampRangeEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDepthClampRangeEXT *>( this );
    }

    operator VkDepthClampRangeEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDepthClampRangeEXT *>( this );
    }

    operator VkDepthClampRangeEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDepthClampRangeEXT *>( this );
    }

    operator VkDepthClampRangeEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDepthClampRangeEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for generic comparison below.
    std::tuple<float const &, float const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( minDepthClamp, maxDepthClamp );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DepthClampRangeEXT const & ) const = default;
#else
    // Member-wise equality; the float members are compared with exact ==.
    bool operator==( DepthClampRangeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( minDepthClamp == rhs.minDepthClamp ) && ( maxDepthClamp == rhs.maxDepthClamp );
#  endif
    }

    bool operator!=( DepthClampRangeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    float minDepthClamp = {};
    float maxDepthClamp = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper for template code (C++20 and newer).
  // No StructureType specialization here: this struct carries no sType member.
  template <>
  struct CppType<VkDepthClampRangeEXT>
  {
    using Type = DepthClampRangeEXT;
  };
#endif

  // wrapper struct for struct VkDescriptorAddressInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorAddressInfoEXT.html
  struct DescriptorAddressInfoEXT
  {
    using NativeType = VkDescriptorAddressInfoEXT;

    // allowDuplicate == false: at most one instance of this struct is expected in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDescriptorAddressInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor. Note: pNext is a non-const void *, matching VkDescriptorAddressInfoEXT.
    VULKAN_HPP_CONSTEXPR DescriptorAddressInfoEXT( DeviceAddress address_ = {},
                                                   DeviceSize    range_   = {},
                                                   Format        format_  = Format::eUndefined,
                                                   void *        pNext_   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , address{ address_ }
      , range{ range_ }
      , format{ format_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DescriptorAddressInfoEXT( DescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-identical to VkDescriptorAddressInfoEXT.
    DescriptorAddressInfoEXT( VkDescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorAddressInfoEXT( *reinterpret_cast<DescriptorAddressInfoEXT const *>( &rhs ) )
    {
    }

    DescriptorAddressInfoEXT & operator=( DescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct through the layout-compatible wrapper view.
    DescriptorAddressInfoEXT & operator=( VkDescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DescriptorAddressInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setAddress( DeviceAddress address_ ) VULKAN_HPP_NOEXCEPT
    {
      address = address_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setRange( DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
    {
      range = range_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setFormat( Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type; these reinterpret this object in place,
    // relying on identical memory layout with VkDescriptorAddressInfoEXT.
    operator VkDescriptorAddressInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorAddressInfoEXT *>( this );
    }

    operator VkDescriptorAddressInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorAddressInfoEXT *>( this );
    }

    operator VkDescriptorAddressInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDescriptorAddressInfoEXT *>( this );
    }

    operator VkDescriptorAddressInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDescriptorAddressInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for generic comparison below.
    std::tuple<StructureType const &, void * const &, DeviceAddress const &, DeviceSize const &, Format const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, address, range, format );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorAddressInfoEXT const & ) const = default;
#else
    // Member-wise equality over all members, including sType and pNext.
    bool operator==( DescriptorAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( address == rhs.address ) && ( range == rhs.range ) && ( format == rhs.format );
#  endif
    }

    bool operator!=( DescriptorAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly (required for the reinterpret_cast conversions above).
    StructureType sType   = StructureType::eDescriptorAddressInfoEXT;
    void *        pNext   = {};
    DeviceAddress address = {};
    DeviceSize    range   = {};
    Format        format  = Format::eUndefined;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper for template code (C++20 and newer).
  template <>
  struct CppType<VkDescriptorAddressInfoEXT>
  {
    using Type = DescriptorAddressInfoEXT;
  };
#endif

  // Maps the eDescriptorAddressInfoEXT sType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDescriptorAddressInfoEXT>
  {
    using Type = DescriptorAddressInfoEXT;
  };

  // wrapper struct for struct VkDescriptorBufferBindingInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorBufferBindingInfoEXT.html
  struct DescriptorBufferBindingInfoEXT
  {
    using NativeType = VkDescriptorBufferBindingInfoEXT;

    // allowDuplicate == false: at most one instance of this struct is expected in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDescriptorBufferBindingInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by the member initializer below and never set here.
    VULKAN_HPP_CONSTEXPR
      DescriptorBufferBindingInfoEXT( DeviceAddress address_ = {}, BufferUsageFlags usage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , address{ address_ }
      , usage{ usage_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DescriptorBufferBindingInfoEXT( DescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-identical to VkDescriptorBufferBindingInfoEXT.
    DescriptorBufferBindingInfoEXT( VkDescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorBufferBindingInfoEXT( *reinterpret_cast<DescriptorBufferBindingInfoEXT const *>( &rhs ) )
    {
    }

    DescriptorBufferBindingInfoEXT & operator=( DescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct through the layout-compatible wrapper view.
    DescriptorBufferBindingInfoEXT & operator=( VkDescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DescriptorBufferBindingInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingInfoEXT & setAddress( DeviceAddress address_ ) VULKAN_HPP_NOEXCEPT
    {
      address = address_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingInfoEXT & setUsage( BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type; these reinterpret this object in place,
    // relying on identical memory layout with VkDescriptorBufferBindingInfoEXT.
    operator VkDescriptorBufferBindingInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( this );
    }

    operator VkDescriptorBufferBindingInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorBufferBindingInfoEXT *>( this );
    }

    operator VkDescriptorBufferBindingInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( this );
    }

    operator VkDescriptorBufferBindingInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDescriptorBufferBindingInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for generic comparison below.
    std::tuple<StructureType const &, const void * const &, DeviceAddress const &, BufferUsageFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, address, usage );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorBufferBindingInfoEXT const & ) const = default;
#else
    // Member-wise equality over all members, including sType and pNext.
    bool operator==( DescriptorBufferBindingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( address == rhs.address ) && ( usage == rhs.usage );
#  endif
    }

    bool operator!=( DescriptorBufferBindingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly (required for the reinterpret_cast conversions above).
    StructureType    sType   = StructureType::eDescriptorBufferBindingInfoEXT;
    const void *     pNext   = {};
    DeviceAddress    address = {};
    BufferUsageFlags usage   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper for template code (C++20 and newer).
  template <>
  struct CppType<VkDescriptorBufferBindingInfoEXT>
  {
    using Type = DescriptorBufferBindingInfoEXT;
  };
#endif

  // Maps the eDescriptorBufferBindingInfoEXT sType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDescriptorBufferBindingInfoEXT>
  {
    using Type = DescriptorBufferBindingInfoEXT;
  };

  // wrapper struct for struct VkDescriptorBufferBindingPushDescriptorBufferHandleEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorBufferBindingPushDescriptorBufferHandleEXT.html
  struct DescriptorBufferBindingPushDescriptorBufferHandleEXT
  {
    using NativeType = VkDescriptorBufferBindingPushDescriptorBufferHandleEXT;

    // Trait data used by structure chains: this struct may appear at most once in a chain,
    // and structureType is the sType value that identifies it.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDescriptorBufferBindingPushDescriptorBufferHandleEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its in-class initializer below.
    VULKAN_HPP_CONSTEXPR DescriptorBufferBindingPushDescriptorBufferHandleEXT( Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , buffer{ buffer_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      DescriptorBufferBindingPushDescriptorBufferHandleEXT( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; the two types are layout-compatible.
    DescriptorBufferBindingPushDescriptorBufferHandleEXT( VkDescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorBufferBindingPushDescriptorBufferHandleEXT( *reinterpret_cast<DescriptorBufferBindingPushDescriptorBufferHandleEXT const *>( &rhs ) )
    {
    }

    DescriptorBufferBindingPushDescriptorBufferHandleEXT &
      operator=( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible, via reinterpret_cast).
    DescriptorBufferBindingPushDescriptorBufferHandleEXT & operator=( VkDescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DescriptorBufferBindingPushDescriptorBufferHandleEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this to allow fluent construction.
    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingPushDescriptorBufferHandleEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingPushDescriptorBufferHandleEXT & setBuffer( Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type: reference and pointer, const and non-const.
    operator VkDescriptorBufferBindingPushDescriptorBufferHandleEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorBufferBindingPushDescriptorBufferHandleEXT *>( this );
    }

    operator VkDescriptorBufferBindingPushDescriptorBufferHandleEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorBufferBindingPushDescriptorBufferHandleEXT *>( this );
    }

    operator VkDescriptorBufferBindingPushDescriptorBufferHandleEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDescriptorBufferBindingPushDescriptorBufferHandleEXT *>( this );
    }

    operator VkDescriptorBufferBindingPushDescriptorBufferHandleEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDescriptorBufferBindingPushDescriptorBufferHandleEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, Buffer const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, buffer );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & ) const = default;
#else
    // Member-wise equality; compares pNext by pointer value, not by chained contents.
    bool operator==( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer );
#  endif
    }

    bool operator!=( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkDescriptorBufferBindingPushDescriptorBufferHandleEXT exactly.
    StructureType sType  = StructureType::eDescriptorBufferBindingPushDescriptorBufferHandleEXT;
    const void *  pNext  = {};
    Buffer        buffer = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper type (available with C++20 and newer).
  template <>
  struct CppType<VkDescriptorBufferBindingPushDescriptorBufferHandleEXT>
  {
    using Type = DescriptorBufferBindingPushDescriptorBufferHandleEXT;
  };
#endif

  // Maps the StructureType enumerant to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDescriptorBufferBindingPushDescriptorBufferHandleEXT>
  {
    using Type = DescriptorBufferBindingPushDescriptorBufferHandleEXT;
  };

  // wrapper struct for struct VkDescriptorBufferInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorBufferInfo.html
  struct DescriptorBufferInfo
  {
    using NativeType = VkDescriptorBufferInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; this struct has no sType/pNext (it is not chainable).
    VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( Buffer buffer_ = {}, DeviceSize offset_ = {}, DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT
      : buffer{ buffer_ }
      , offset{ offset_ }
      , range{ range_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; the two types are layout-compatible.
    DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorBufferInfo( *reinterpret_cast<DescriptorBufferInfo const *>( &rhs ) )
    {
    }

    DescriptorBufferInfo & operator=( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible, via reinterpret_cast).
    DescriptorBufferInfo & operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DescriptorBufferInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this to allow fluent construction.
    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setBuffer( Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setOffset( DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setRange( DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
    {
      range = range_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type: reference and pointer, const and non-const.
    operator VkDescriptorBufferInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorBufferInfo *>( this );
    }

    operator VkDescriptorBufferInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorBufferInfo *>( this );
    }

    operator VkDescriptorBufferInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDescriptorBufferInfo *>( this );
    }

    operator VkDescriptorBufferInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDescriptorBufferInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<Buffer const &, DeviceSize const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( buffer, offset, range );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorBufferInfo const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( range == rhs.range );
#  endif
    }

    bool operator!=( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkDescriptorBufferInfo exactly.
    Buffer     buffer = {};
    DeviceSize offset = {};
    DeviceSize range  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper type (available with C++20 and newer).
  template <>
  struct CppType<VkDescriptorBufferInfo>
  {
    using Type = DescriptorBufferInfo;
  };
#endif

  // wrapper struct for struct VkDescriptorImageInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorImageInfo.html
  struct DescriptorImageInfo
  {
    using NativeType = VkDescriptorImageInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; this struct has no sType/pNext (it is not chainable).
    VULKAN_HPP_CONSTEXPR
      DescriptorImageInfo( Sampler sampler_ = {}, ImageView imageView_ = {}, ImageLayout imageLayout_ = ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
      : sampler{ sampler_ }
      , imageView{ imageView_ }
      , imageLayout{ imageLayout_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DescriptorImageInfo( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; the two types are layout-compatible.
    DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorImageInfo( *reinterpret_cast<DescriptorImageInfo const *>( &rhs ) )
    {
    }

    DescriptorImageInfo & operator=( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible, via reinterpret_cast).
    DescriptorImageInfo & operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DescriptorImageInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this to allow fluent construction.
    VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setSampler( Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
    {
      sampler = sampler_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setImageView( ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
    {
      imageView = imageView_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setImageLayout( ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      imageLayout = imageLayout_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type: reference and pointer, const and non-const.
    operator VkDescriptorImageInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorImageInfo *>( this );
    }

    operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorImageInfo *>( this );
    }

    operator VkDescriptorImageInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDescriptorImageInfo *>( this );
    }

    operator VkDescriptorImageInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDescriptorImageInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<Sampler const &, ImageView const &, ImageLayout const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sampler, imageView, imageLayout );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorImageInfo const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sampler == rhs.sampler ) && ( imageView == rhs.imageView ) && ( imageLayout == rhs.imageLayout );
#  endif
    }

    bool operator!=( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkDescriptorImageInfo exactly.
    Sampler     sampler     = {};
    ImageView   imageView   = {};
    ImageLayout imageLayout = ImageLayout::eUndefined;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper type (available with C++20 and newer).
  template <>
  struct CppType<VkDescriptorImageInfo>
  {
    using Type = DescriptorImageInfo;
  };
#endif

  // Wrapper union for VkDescriptorDataEXT: all pointer members overlay the same storage.
  // Which member is active is determined externally — see DescriptorGetInfoEXT::type.
  union DescriptorDataEXT
  {
    using NativeType = VkDescriptorDataEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )

    // One constructor per distinct member type; several members share a type (e.g. the
    // DescriptorImageInfo pointers), so only one representative constructor exists for each.
    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( const Sampler * pSampler_ = {} ) : pSampler( pSampler_ ) {}

    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( const DescriptorImageInfo * pDescriptorImageInfo_ ) : pCombinedImageSampler( pDescriptorImageInfo_ ) {}

    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( const DescriptorAddressInfoEXT * pDescriptorAddressInfoEXT_ ) : pUniformTexelBuffer( pDescriptorAddressInfoEXT_ )
    {
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( DeviceAddress accelerationStructure_ ) : accelerationStructure( accelerationStructure_ ) {}
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
    // Chainable setters, one per union member; each makes that member the active one.
    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPSampler( const Sampler * pSampler_ ) VULKAN_HPP_NOEXCEPT
    {
      pSampler = pSampler_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPCombinedImageSampler( const DescriptorImageInfo * pCombinedImageSampler_ ) VULKAN_HPP_NOEXCEPT
    {
      pCombinedImageSampler = pCombinedImageSampler_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPInputAttachmentImage( const DescriptorImageInfo * pInputAttachmentImage_ ) VULKAN_HPP_NOEXCEPT
    {
      pInputAttachmentImage = pInputAttachmentImage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPSampledImage( const DescriptorImageInfo * pSampledImage_ ) VULKAN_HPP_NOEXCEPT
    {
      pSampledImage = pSampledImage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPStorageImage( const DescriptorImageInfo * pStorageImage_ ) VULKAN_HPP_NOEXCEPT
    {
      pStorageImage = pStorageImage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPUniformTexelBuffer( const DescriptorAddressInfoEXT * pUniformTexelBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      pUniformTexelBuffer = pUniformTexelBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPStorageTexelBuffer( const DescriptorAddressInfoEXT * pStorageTexelBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      pStorageTexelBuffer = pStorageTexelBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPUniformBuffer( const DescriptorAddressInfoEXT * pUniformBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      pUniformBuffer = pUniformBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPStorageBuffer( const DescriptorAddressInfoEXT * pStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      pStorageBuffer = pStorageBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setAccelerationStructure( DeviceAddress accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructure = accelerationStructure_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C union (layout-compatible).
    operator VkDescriptorDataEXT const &() const
    {
      return *reinterpret_cast<const VkDescriptorDataEXT *>( this );
    }

    operator VkDescriptorDataEXT &()
    {
      return *reinterpret_cast<VkDescriptorDataEXT *>( this );
    }

#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
    // With unrestricted unions, members use the C++ wrapper types directly ...
    const Sampler *                  pSampler;
    const DescriptorImageInfo *      pCombinedImageSampler;
    const DescriptorImageInfo *      pInputAttachmentImage;
    const DescriptorImageInfo *      pSampledImage;
    const DescriptorImageInfo *      pStorageImage;
    const DescriptorAddressInfoEXT * pUniformTexelBuffer;
    const DescriptorAddressInfoEXT * pStorageTexelBuffer;
    const DescriptorAddressInfoEXT * pUniformBuffer;
    const DescriptorAddressInfoEXT * pStorageBuffer;
    DeviceAddress                    accelerationStructure;
#else
    // ... otherwise fall back to the native C types with identical layout.
    const VkSampler *                  pSampler;
    const VkDescriptorImageInfo *      pCombinedImageSampler;
    const VkDescriptorImageInfo *      pInputAttachmentImage;
    const VkDescriptorImageInfo *      pSampledImage;
    const VkDescriptorImageInfo *      pStorageImage;
    const VkDescriptorAddressInfoEXT * pUniformTexelBuffer;
    const VkDescriptorAddressInfoEXT * pStorageTexelBuffer;
    const VkDescriptorAddressInfoEXT * pUniformBuffer;
    const VkDescriptorAddressInfoEXT * pStorageBuffer;
    VkDeviceAddress                    accelerationStructure;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper type (available with C++20 and newer).
  template <>
  struct CppType<VkDescriptorDataEXT>
  {
    using Type = DescriptorDataEXT;
  };
#endif

  // wrapper struct for struct VkDescriptorGetInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorGetInfoEXT.html
  struct DescriptorGetInfoEXT
  {
    using NativeType = VkDescriptorGetInfoEXT;

    // Trait data used by structure chains: this struct may appear at most once in a chain,
    // and structureType is the sType value that identifies it.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDescriptorGetInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; CONSTEXPR_14 (not CONSTEXPR) because data is a union.
    // type selects which member of the DescriptorDataEXT union is active.
    VULKAN_HPP_CONSTEXPR_14
      DescriptorGetInfoEXT( DescriptorType type_ = DescriptorType::eSampler, DescriptorDataEXT data_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , type{ type_ }
      , data{ data_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT( DescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; the two types are layout-compatible.
    DescriptorGetInfoEXT( VkDescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorGetInfoEXT( *reinterpret_cast<DescriptorGetInfoEXT const *>( &rhs ) )
    {
    }

    DescriptorGetInfoEXT & operator=( DescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible, via reinterpret_cast).
    DescriptorGetInfoEXT & operator=( VkDescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DescriptorGetInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this to allow fluent construction.
    VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT & setType( DescriptorType type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT & setData( DescriptorDataEXT const & data_ ) VULKAN_HPP_NOEXCEPT
    {
      data = data_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type: reference and pointer, const and non-const.
    operator VkDescriptorGetInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorGetInfoEXT *>( this );
    }

    operator VkDescriptorGetInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorGetInfoEXT *>( this );
    }

    operator VkDescriptorGetInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDescriptorGetInfoEXT *>( this );
    }

    operator VkDescriptorGetInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDescriptorGetInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection.
    std::tuple<StructureType const &, const void * const &, DescriptorType const &, DescriptorDataEXT const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, type, data );
    }
#endif

    // NOTE: no comparison operators are provided here, since data is a union and
    // cannot be compared member-wise.
  public:
    StructureType     sType = StructureType::eDescriptorGetInfoEXT;
    const void *      pNext = {};
    DescriptorType    type  = DescriptorType::eSampler;
    DescriptorDataEXT data  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper type (available with C++20 and newer).
  template <>
  struct CppType<VkDescriptorGetInfoEXT>
  {
    using Type = DescriptorGetInfoEXT;
  };
#endif

  // Maps the StructureType enumerant to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDescriptorGetInfoEXT>
  {
    using Type = DescriptorGetInfoEXT;
  };

  // wrapper struct for struct VkDescriptorGetTensorInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorGetTensorInfoARM.html
  struct DescriptorGetTensorInfoARM
  {
    using NativeType = VkDescriptorGetTensorInfoARM;

    // Trait data used by structure chains: this struct may appear at most once in a chain,
    // and structureType is the sType value that identifies it.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDescriptorGetTensorInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its in-class initializer below.
    VULKAN_HPP_CONSTEXPR DescriptorGetTensorInfoARM( TensorViewARM tensorView_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , tensorView{ tensorView_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DescriptorGetTensorInfoARM( DescriptorGetTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; the two types are layout-compatible.
    DescriptorGetTensorInfoARM( VkDescriptorGetTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorGetTensorInfoARM( *reinterpret_cast<DescriptorGetTensorInfoARM const *>( &rhs ) )
    {
    }

    DescriptorGetTensorInfoARM & operator=( DescriptorGetTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible, via reinterpret_cast).
    DescriptorGetTensorInfoARM & operator=( VkDescriptorGetTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DescriptorGetTensorInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this to allow fluent construction.
    VULKAN_HPP_CONSTEXPR_14 DescriptorGetTensorInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorGetTensorInfoARM & setTensorView( TensorViewARM tensorView_ ) VULKAN_HPP_NOEXCEPT
    {
      tensorView = tensorView_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type: reference and pointer, const and non-const.
    operator VkDescriptorGetTensorInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorGetTensorInfoARM *>( this );
    }

    operator VkDescriptorGetTensorInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorGetTensorInfoARM *>( this );
    }

    operator VkDescriptorGetTensorInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDescriptorGetTensorInfoARM *>( this );
    }

    operator VkDescriptorGetTensorInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDescriptorGetTensorInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, TensorViewARM const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, tensorView );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorGetTensorInfoARM const & ) const = default;
#else
    // Member-wise equality; compares pNext by pointer value, not by chained contents.
    bool operator==( DescriptorGetTensorInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tensorView == rhs.tensorView );
#  endif
    }

    bool operator!=( DescriptorGetTensorInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkDescriptorGetTensorInfoARM exactly.
    StructureType sType      = StructureType::eDescriptorGetTensorInfoARM;
    const void *  pNext      = {};
    TensorViewARM tensorView = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper type (available with C++20 and newer).
  template <>
  struct CppType<VkDescriptorGetTensorInfoARM>
  {
    using Type = DescriptorGetTensorInfoARM;
  };
#endif

  // Maps the StructureType enumerant to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDescriptorGetTensorInfoARM>
  {
    using Type = DescriptorGetTensorInfoARM;
  };

  // wrapper struct for struct VkDescriptorPoolSize, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorPoolSize.html
  struct DescriptorPoolSize
  {
    using NativeType = VkDescriptorPoolSize;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; this struct has no sType/pNext (it is not chainable).
    VULKAN_HPP_CONSTEXPR DescriptorPoolSize( DescriptorType type_ = DescriptorType::eSampler, uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
      : type{ type_ }
      , descriptorCount{ descriptorCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DescriptorPoolSize( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; the two types are layout-compatible.
    DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorPoolSize( *reinterpret_cast<DescriptorPoolSize const *>( &rhs ) ) {}

    DescriptorPoolSize & operator=( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible, via reinterpret_cast).
    DescriptorPoolSize & operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DescriptorPoolSize const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this to allow fluent construction.
    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & setType( DescriptorType type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorCount = descriptorCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type: reference and pointer, const and non-const.
    operator VkDescriptorPoolSize const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorPoolSize *>( this );
    }

    operator VkDescriptorPoolSize &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorPoolSize *>( this );
    }

    operator VkDescriptorPoolSize const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDescriptorPoolSize *>( this );
    }

    operator VkDescriptorPoolSize *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDescriptorPoolSize *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<DescriptorType const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( type, descriptorCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorPoolSize const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( type == rhs.type ) && ( descriptorCount == rhs.descriptorCount );
#  endif
    }

    bool operator!=( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkDescriptorPoolSize exactly.
    DescriptorType type            = DescriptorType::eSampler;
    uint32_t       descriptorCount = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper type (available with C++20 and newer).
  template <>
  struct CppType<VkDescriptorPoolSize>
  {
    using Type = DescriptorPoolSize;
  };
#endif

  // wrapper struct for struct VkDescriptorPoolCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorPoolCreateInfo.html
  struct DescriptorPoolCreateInfo
  {
    using NativeType = VkDescriptorPoolCreateInfo;

    // Trait data used by structure chains: this struct may appear at most once in a chain,
    // and structureType is the sType value that identifies it.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDescriptorPoolCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pPoolSizes must point to poolSizeCount_ elements.
    VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( DescriptorPoolCreateFlags  flags_         = {},
                                                   uint32_t                   maxSets_       = {},
                                                   uint32_t                   poolSizeCount_ = {},
                                                   const DescriptorPoolSize * pPoolSizes_    = {},
                                                   const void *               pNext_         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , maxSets{ maxSets_ }
      , poolSizeCount{ poolSizeCount_ }
      , pPoolSizes{ pPoolSizes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; the two types are layout-compatible.
    DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorPoolCreateInfo( *reinterpret_cast<DescriptorPoolCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode convenience constructor: derives poolSizeCount and pPoolSizes from an
    // ArrayProxy. The proxy's data must outlive this struct (only the pointer is stored).
    DescriptorPoolCreateInfo( DescriptorPoolCreateFlags                                 flags_,
                              uint32_t                                                  maxSets_,
                              ArrayProxyNoTemporaries<const DescriptorPoolSize> const & poolSizes_,
                              const void *                                              pNext_ = nullptr )
      : pNext( pNext_ ), flags( flags_ ), maxSets( maxSets_ ), poolSizeCount( static_cast<uint32_t>( poolSizes_.size() ) ), pPoolSizes( poolSizes_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DescriptorPoolCreateInfo & operator=( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible, via reinterpret_cast).
    DescriptorPoolCreateInfo & operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DescriptorPoolCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this to allow fluent construction.
    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setFlags( DescriptorPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setMaxSets( uint32_t maxSets_ ) VULKAN_HPP_NOEXCEPT
    {
      maxSets = maxSets_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPoolSizeCount( uint32_t poolSizeCount_ ) VULKAN_HPP_NOEXCEPT
    {
      poolSizeCount = poolSizeCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPPoolSizes( const DescriptorPoolSize * pPoolSizes_ ) VULKAN_HPP_NOEXCEPT
    {
      pPoolSizes = pPoolSizes_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets count and pointer together from an ArrayProxy.
    // The proxy's data must outlive this struct (only the pointer is stored).
    DescriptorPoolCreateInfo & setPoolSizes( ArrayProxyNoTemporaries<const DescriptorPoolSize> const & poolSizes_ ) VULKAN_HPP_NOEXCEPT
    {
      poolSizeCount = static_cast<uint32_t>( poolSizes_.size() );
      pPoolSizes    = poolSizes_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type: reference and pointer, const and non-const.
    operator VkDescriptorPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorPoolCreateInfo *>( this );
    }

    operator VkDescriptorPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorPoolCreateInfo *>( this );
    }

    operator VkDescriptorPoolCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDescriptorPoolCreateInfo *>( this );
    }

    operator VkDescriptorPoolCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDescriptorPoolCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<StructureType const &,
               const void * const &,
               DescriptorPoolCreateFlags const &,
               uint32_t const &,
               uint32_t const &,
               const DescriptorPoolSize * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, maxSets, poolSizeCount, pPoolSizes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorPoolCreateInfo const & ) const = default;
#else
    // Member-wise equality; pointers (pNext, pPoolSizes) are compared by address,
    // not by pointed-to contents.
    bool operator==( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( maxSets == rhs.maxSets ) &&
             ( poolSizeCount == rhs.poolSizeCount ) && ( pPoolSizes == rhs.pPoolSizes );
#  endif
    }

    bool operator!=( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkDescriptorPoolCreateInfo exactly.
    StructureType              sType         = StructureType::eDescriptorPoolCreateInfo;
    const void *               pNext         = {};
    DescriptorPoolCreateFlags  flags         = {};
    uint32_t                   maxSets       = {};
    uint32_t                   poolSizeCount = {};
    const DescriptorPoolSize * pPoolSizes    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper type (available with C++20 and newer).
  template <>
  struct CppType<VkDescriptorPoolCreateInfo>
  {
    using Type = DescriptorPoolCreateInfo;
  };
#endif

  // Maps the StructureType enumerant to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDescriptorPoolCreateInfo>
  {
    using Type = DescriptorPoolCreateInfo;
  };

  // wrapper struct for struct VkDescriptorPoolInlineUniformBlockCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorPoolInlineUniformBlockCreateInfo.html
  struct DescriptorPoolInlineUniformBlockCreateInfo
  {
    using NativeType = VkDescriptorPoolInlineUniformBlockCreateInfo;

    // This struct may appear at most once in a structure chain (allowDuplicate == false).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDescriptorPoolInlineUniformBlockCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext comes last so value members can be passed positionally.
    VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo( uint32_t     maxInlineUniformBlockBindings_ = {},
                                                                     const void * pNext_                         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxInlineUniformBlockBindings{ maxInlineUniformBlockBindings_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with it.
    DescriptorPoolInlineUniformBlockCreateInfo( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorPoolInlineUniformBlockCreateInfo( *reinterpret_cast<DescriptorPoolInlineUniformBlockCreateInfo const *>( &rhs ) )
    {
    }

    DescriptorPoolInlineUniformBlockCreateInfo & operator=( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as above).
    DescriptorPoolInlineUniformBlockCreateInfo & operator=( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DescriptorPoolInlineUniformBlockCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters returning *this, enabling fluent-style struct population.
    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo &
      setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) VULKAN_HPP_NOEXCEPT
    {
      maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkDescriptorPoolInlineUniformBlockCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorPoolInlineUniformBlockCreateInfo *>( this );
    }

    operator VkDescriptorPoolInlineUniformBlockCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorPoolInlineUniformBlockCreateInfo *>( this );
    }

    operator VkDescriptorPoolInlineUniformBlockCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDescriptorPoolInlineUniformBlockCreateInfo *>( this );
    }

    operator VkDescriptorPoolInlineUniformBlockCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDescriptorPoolInlineUniformBlockCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view of all members, used by the reflection-based comparison below.
    std::tuple<StructureType const &, const void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxInlineUniformBlockBindings );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorPoolInlineUniformBlockCreateInfo const & ) const = default;
#else
    // Member-wise equality; the pNext pointer is compared by address, not by chained contents.
    bool operator==( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings );
#  endif
    }

    bool operator!=( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkDescriptorPoolInlineUniformBlockCreateInfo exactly.
    StructureType sType                         = StructureType::eDescriptorPoolInlineUniformBlockCreateInfo;
    const void *  pNext                         = {};
    uint32_t      maxInlineUniformBlockBindings = {};
  };

// Maps the native C struct to its C++ wrapper type (C++20 and newer only).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDescriptorPoolInlineUniformBlockCreateInfo>
  {
    using Type = DescriptorPoolInlineUniformBlockCreateInfo;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::eDescriptorPoolInlineUniformBlockCreateInfo>
  {
    using Type = DescriptorPoolInlineUniformBlockCreateInfo;
  };

  // Backwards-compatible alias for the struct's original EXT (extension) name.
  using DescriptorPoolInlineUniformBlockCreateInfoEXT = DescriptorPoolInlineUniformBlockCreateInfo;

  // wrapper struct for struct VkDescriptorSetAllocateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetAllocateInfo.html
  struct DescriptorSetAllocateInfo
  {
    using NativeType = VkDescriptorSetAllocateInfo;

    // This struct may appear at most once in a structure chain (allowDuplicate == false).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDescriptorSetAllocateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext comes last so value members can be passed positionally.
    VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( DescriptorPool              descriptorPool_     = {},
                                                    uint32_t                    descriptorSetCount_ = {},
                                                    const DescriptorSetLayout * pSetLayouts_        = {},
                                                    const void *                pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , descriptorPool{ descriptorPool_ }
      , descriptorSetCount{ descriptorSetCount_ }
      , pSetLayouts{ pSetLayouts_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with it.
    DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorSetAllocateInfo( *reinterpret_cast<DescriptorSetAllocateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: count is derived from the proxy, and only the proxy's data
    // pointer is stored — the backing array must outlive this struct.
    DescriptorSetAllocateInfo( DescriptorPool                                             descriptorPool_,
                               ArrayProxyNoTemporaries<const DescriptorSetLayout> const & setLayouts_,
                               const void *                                               pNext_ = nullptr )
      : pNext( pNext_ ), descriptorPool( descriptorPool_ ), descriptorSetCount( static_cast<uint32_t>( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DescriptorSetAllocateInfo & operator=( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as above).
    DescriptorSetAllocateInfo & operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DescriptorSetAllocateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters returning *this, enabling fluent-style struct population.
    VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setDescriptorPool( DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorPool = descriptorPool_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorSetCount = descriptorSetCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setPSetLayouts( const DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      pSetLayouts = pSetLayouts_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets count and pointer together from one array proxy.
    DescriptorSetAllocateInfo & setSetLayouts( ArrayProxyNoTemporaries<const DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorSetCount = static_cast<uint32_t>( setLayouts_.size() );
      pSetLayouts        = setLayouts_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkDescriptorSetAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorSetAllocateInfo *>( this );
    }

    operator VkDescriptorSetAllocateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorSetAllocateInfo *>( this );
    }

    operator VkDescriptorSetAllocateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDescriptorSetAllocateInfo *>( this );
    }

    operator VkDescriptorSetAllocateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDescriptorSetAllocateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view of all members, used by the reflection-based comparison below.
    std::tuple<StructureType const &, const void * const &, DescriptorPool const &, uint32_t const &, const DescriptorSetLayout * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, descriptorPool, descriptorSetCount, pSetLayouts );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorSetAllocateInfo const & ) const = default;
#else
    // Member-wise equality; pointer members (pNext, pSetLayouts) are compared by address,
    // not by pointed-to contents.
    bool operator==( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorPool == rhs.descriptorPool ) && ( descriptorSetCount == rhs.descriptorSetCount ) &&
             ( pSetLayouts == rhs.pSetLayouts );
#  endif
    }

    bool operator!=( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkDescriptorSetAllocateInfo exactly.
    StructureType               sType              = StructureType::eDescriptorSetAllocateInfo;
    const void *                pNext              = {};
    DescriptorPool              descriptorPool     = {};
    uint32_t                    descriptorSetCount = {};
    const DescriptorSetLayout * pSetLayouts        = {};
  };

// Maps the native C struct to its C++ wrapper type (C++20 and newer only).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDescriptorSetAllocateInfo>
  {
    using Type = DescriptorSetAllocateInfo;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::eDescriptorSetAllocateInfo>
  {
    using Type = DescriptorSetAllocateInfo;
  };

  // wrapper struct for struct VkDescriptorSetBindingReferenceVALVE, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetBindingReferenceVALVE.html
  struct DescriptorSetBindingReferenceVALVE
  {
    using NativeType = VkDescriptorSetBindingReferenceVALVE;

    // This struct may appear at most once in a structure chain (allowDuplicate == false).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDescriptorSetBindingReferenceVALVE;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext comes last so value members can be passed positionally.
    VULKAN_HPP_CONSTEXPR DescriptorSetBindingReferenceVALVE( DescriptorSetLayout descriptorSetLayout_ = {},
                                                             uint32_t            binding_             = {},
                                                             const void *        pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , descriptorSetLayout{ descriptorSetLayout_ }
      , binding{ binding_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DescriptorSetBindingReferenceVALVE( DescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with it.
    DescriptorSetBindingReferenceVALVE( VkDescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorSetBindingReferenceVALVE( *reinterpret_cast<DescriptorSetBindingReferenceVALVE const *>( &rhs ) )
    {
    }

    DescriptorSetBindingReferenceVALVE & operator=( DescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as above).
    DescriptorSetBindingReferenceVALVE & operator=( VkDescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DescriptorSetBindingReferenceVALVE const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters returning *this, enabling fluent-style struct population.
    VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorSetLayout = descriptorSetLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
    {
      binding = binding_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkDescriptorSetBindingReferenceVALVE const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( this );
    }

    operator VkDescriptorSetBindingReferenceVALVE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorSetBindingReferenceVALVE *>( this );
    }

    operator VkDescriptorSetBindingReferenceVALVE const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( this );
    }

    operator VkDescriptorSetBindingReferenceVALVE *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDescriptorSetBindingReferenceVALVE *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view of all members, used by the reflection-based comparison below.
    std::tuple<StructureType const &, const void * const &, DescriptorSetLayout const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, descriptorSetLayout, binding );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorSetBindingReferenceVALVE const & ) const = default;
#else
    // Member-wise equality; the pNext pointer is compared by address, not by chained contents.
    bool operator==( DescriptorSetBindingReferenceVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetLayout == rhs.descriptorSetLayout ) && ( binding == rhs.binding );
#  endif
    }

    bool operator!=( DescriptorSetBindingReferenceVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkDescriptorSetBindingReferenceVALVE exactly.
    StructureType       sType               = StructureType::eDescriptorSetBindingReferenceVALVE;
    const void *        pNext               = {};
    DescriptorSetLayout descriptorSetLayout = {};
    uint32_t            binding             = {};
  };

// Maps the native C struct to its C++ wrapper type (C++20 and newer only).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDescriptorSetBindingReferenceVALVE>
  {
    using Type = DescriptorSetBindingReferenceVALVE;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::eDescriptorSetBindingReferenceVALVE>
  {
    using Type = DescriptorSetBindingReferenceVALVE;
  };

  // wrapper struct for struct VkDescriptorSetLayoutBinding, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetLayoutBinding.html
  // Note: this is a plain (non-extensible) struct — it has no sType/pNext members and
  // therefore no structureType constant or StructureType CppType mapping.
  struct DescriptorSetLayoutBinding
  {
    using NativeType = VkDescriptorSetLayoutBinding;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor matching the C struct's member order.
    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( uint32_t         binding_            = {},
                                                     DescriptorType   descriptorType_     = DescriptorType::eSampler,
                                                     uint32_t         descriptorCount_    = {},
                                                     ShaderStageFlags stageFlags_         = {},
                                                     const Sampler *  pImmutableSamplers_ = {} ) VULKAN_HPP_NOEXCEPT
      : binding{ binding_ }
      , descriptorType{ descriptorType_ }
      , descriptorCount{ descriptorCount_ }
      , stageFlags{ stageFlags_ }
      , pImmutableSamplers{ pImmutableSamplers_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with it.
    DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorSetLayoutBinding( *reinterpret_cast<DescriptorSetLayoutBinding const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: descriptorCount is derived from the proxy, and only the
    // proxy's data pointer is stored — the backing array must outlive this struct.
    DescriptorSetLayoutBinding( uint32_t                                       binding_,
                                DescriptorType                                 descriptorType_,
                                ShaderStageFlags                               stageFlags_,
                                ArrayProxyNoTemporaries<const Sampler> const & immutableSamplers_ )
      : binding( binding_ )
      , descriptorType( descriptorType_ )
      , descriptorCount( static_cast<uint32_t>( immutableSamplers_.size() ) )
      , stageFlags( stageFlags_ )
      , pImmutableSamplers( immutableSamplers_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DescriptorSetLayoutBinding & operator=( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as above).
    DescriptorSetLayoutBinding & operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DescriptorSetLayoutBinding const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters returning *this, enabling fluent-style struct population.
    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
    {
      binding = binding_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setDescriptorType( DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorType = descriptorType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorCount = descriptorCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setStageFlags( ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      stageFlags = stageFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setPImmutableSamplers( const Sampler * pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT
    {
      pImmutableSamplers = pImmutableSamplers_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets descriptorCount and pointer together from one array proxy.
    DescriptorSetLayoutBinding & setImmutableSamplers( ArrayProxyNoTemporaries<const Sampler> const & immutableSamplers_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorCount    = static_cast<uint32_t>( immutableSamplers_.size() );
      pImmutableSamplers = immutableSamplers_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkDescriptorSetLayoutBinding const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorSetLayoutBinding *>( this );
    }

    operator VkDescriptorSetLayoutBinding &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorSetLayoutBinding *>( this );
    }

    operator VkDescriptorSetLayoutBinding const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDescriptorSetLayoutBinding *>( this );
    }

    operator VkDescriptorSetLayoutBinding *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDescriptorSetLayoutBinding *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view of all members, used by the reflection-based comparison below.
    std::tuple<uint32_t const &, DescriptorType const &, uint32_t const &, ShaderStageFlags const &, const Sampler * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( binding, descriptorType, descriptorCount, stageFlags, pImmutableSamplers );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorSetLayoutBinding const & ) const = default;
#else
    // Member-wise equality; pImmutableSamplers is compared by address, not by contents.
    bool operator==( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( binding == rhs.binding ) && ( descriptorType == rhs.descriptorType ) && ( descriptorCount == rhs.descriptorCount ) &&
             ( stageFlags == rhs.stageFlags ) && ( pImmutableSamplers == rhs.pImmutableSamplers );
#  endif
    }

    bool operator!=( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkDescriptorSetLayoutBinding exactly.
    uint32_t         binding            = {};
    DescriptorType   descriptorType     = DescriptorType::eSampler;
    uint32_t         descriptorCount    = {};
    ShaderStageFlags stageFlags         = {};
    const Sampler *  pImmutableSamplers = {};
  };

// Maps the native C struct to its C++ wrapper type (C++20 and newer only).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDescriptorSetLayoutBinding>
  {
    using Type = DescriptorSetLayoutBinding;
  };
#endif

  // wrapper struct for struct VkDescriptorSetLayoutBindingFlagsCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetLayoutBindingFlagsCreateInfo.html
  struct DescriptorSetLayoutBindingFlagsCreateInfo
  {
    using NativeType = VkDescriptorSetLayoutBindingFlagsCreateInfo;

    // This struct may appear at most once in a structure chain (allowDuplicate == false).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext comes last so value members can be passed positionally.
    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( uint32_t                       bindingCount_  = {},
                                                                    const DescriptorBindingFlags * pBindingFlags_ = {},
                                                                    const void *                   pNext_         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , bindingCount{ bindingCount_ }
      , pBindingFlags{ pBindingFlags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with it.
    DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorSetLayoutBindingFlagsCreateInfo( *reinterpret_cast<DescriptorSetLayoutBindingFlagsCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: count is derived from the proxy, and only the proxy's data
    // pointer is stored — the backing array must outlive this struct.
    DescriptorSetLayoutBindingFlagsCreateInfo( ArrayProxyNoTemporaries<const DescriptorBindingFlags> const & bindingFlags_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), bindingCount( static_cast<uint32_t>( bindingFlags_.size() ) ), pBindingFlags( bindingFlags_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DescriptorSetLayoutBindingFlagsCreateInfo & operator=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as above).
    DescriptorSetLayoutBindingFlagsCreateInfo & operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DescriptorSetLayoutBindingFlagsCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters returning *this, enabling fluent-style struct population.
    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
    {
      bindingCount = bindingCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setPBindingFlags( const DescriptorBindingFlags * pBindingFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      pBindingFlags = pBindingFlags_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets count and pointer together from one array proxy.
    DescriptorSetLayoutBindingFlagsCreateInfo &
      setBindingFlags( ArrayProxyNoTemporaries<const DescriptorBindingFlags> const & bindingFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      bindingCount  = static_cast<uint32_t>( bindingFlags_.size() );
      pBindingFlags = bindingFlags_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkDescriptorSetLayoutBindingFlagsCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorSetLayoutBindingFlagsCreateInfo *>( this );
    }

    operator VkDescriptorSetLayoutBindingFlagsCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorSetLayoutBindingFlagsCreateInfo *>( this );
    }

    operator VkDescriptorSetLayoutBindingFlagsCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDescriptorSetLayoutBindingFlagsCreateInfo *>( this );
    }

    operator VkDescriptorSetLayoutBindingFlagsCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDescriptorSetLayoutBindingFlagsCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view of all members, used by the reflection-based comparison below.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const DescriptorBindingFlags * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, bindingCount, pBindingFlags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorSetLayoutBindingFlagsCreateInfo const & ) const = default;
#else
    // Member-wise equality; pointer members (pNext, pBindingFlags) are compared by address,
    // not by pointed-to contents.
    bool operator==( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bindingCount == rhs.bindingCount ) && ( pBindingFlags == rhs.pBindingFlags );
#  endif
    }

    bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkDescriptorSetLayoutBindingFlagsCreateInfo exactly.
    StructureType                  sType         = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;
    const void *                   pNext         = {};
    uint32_t                       bindingCount  = {};
    const DescriptorBindingFlags * pBindingFlags = {};
  };

// Maps the native C struct to its C++ wrapper type (C++20 and newer only).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDescriptorSetLayoutBindingFlagsCreateInfo>
  {
    using Type = DescriptorSetLayoutBindingFlagsCreateInfo;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo>
  {
    using Type = DescriptorSetLayoutBindingFlagsCreateInfo;
  };

  // Backwards-compatible alias for the struct's original EXT (extension) name.
  using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo;

  // wrapper struct for struct VkDescriptorSetLayoutCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetLayoutCreateInfo.html
  struct DescriptorSetLayoutCreateInfo
  {
    using NativeType = VkDescriptorSetLayoutCreateInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDescriptorSetLayoutCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateFlags     flags_        = {},
                                                        uint32_t                           bindingCount_ = {},
                                                        const DescriptorSetLayoutBinding * pBindings_    = {},
                                                        const void *                       pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , bindingCount{ bindingCount_ }
      , pBindings{ pBindings_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorSetLayoutCreateInfo( *reinterpret_cast<DescriptorSetLayoutCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateFlags                                    flags_,
                                   ArrayProxyNoTemporaries<const DescriptorSetLayoutBinding> const & bindings_,
                                   const void *                                                      pNext_ = nullptr )
      : pNext( pNext_ ), flags( flags_ ), bindingCount( static_cast<uint32_t>( bindings_.size() ) ), pBindings( bindings_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DescriptorSetLayoutCreateInfo & operator=( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    DescriptorSetLayoutCreateInfo & operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DescriptorSetLayoutCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setFlags( DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
    {
      bindingCount = bindingCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setPBindings( const DescriptorSetLayoutBinding * pBindings_ ) VULKAN_HPP_NOEXCEPT
    {
      pBindings = pBindings_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DescriptorSetLayoutCreateInfo & setBindings( ArrayProxyNoTemporaries<const DescriptorSetLayoutBinding> const & bindings_ ) VULKAN_HPP_NOEXCEPT
    {
      bindingCount = static_cast<uint32_t>( bindings_.size() );
      pBindings    = bindings_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    operator VkDescriptorSetLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( this );
    }

    operator VkDescriptorSetLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorSetLayoutCreateInfo *>( this );
    }

    operator VkDescriptorSetLayoutCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( this );
    }

    operator VkDescriptorSetLayoutCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDescriptorSetLayoutCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::
      tuple<StructureType const &, const void * const &, DescriptorSetLayoutCreateFlags const &, uint32_t const &, const DescriptorSetLayoutBinding * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, bindingCount, pBindings );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorSetLayoutCreateInfo const & ) const = default;
#else
    // Shallow memberwise equality: pointer members (pNext, pBindings) are compared by
    // address, not by pointee contents.
    bool operator==( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( bindingCount == rhs.bindingCount ) &&
             ( pBindings == rhs.pBindings );
#  endif
    }

    bool operator!=( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                      sType        = StructureType::eDescriptorSetLayoutCreateInfo;
    const void *                       pNext        = {};
    DescriptorSetLayoutCreateFlags     flags        = {};
    uint32_t                           bindingCount = {};
    const DescriptorSetLayoutBinding * pBindings    = {};
  };

// Trait wiring: maps the native C type and the StructureType enumerant back to the
// C++ wrapper type, for use by generic code elsewhere in vulkan.hpp.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDescriptorSetLayoutCreateInfo>
  {
    using Type = DescriptorSetLayoutCreateInfo;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eDescriptorSetLayoutCreateInfo>
  {
    using Type = DescriptorSetLayoutCreateInfo;
  };

  // wrapper struct for struct VkDescriptorSetLayoutHostMappingInfoVALVE, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetLayoutHostMappingInfoVALVE.html
  //
  // Relies on being layout-identical to the C struct: every conversion below is a
  // reinterpret_cast of `this`.
  struct DescriptorSetLayoutHostMappingInfoVALVE
  {
    using NativeType = VkDescriptorSetLayoutHostMappingInfoVALVE;

    // NOTE(review): allowDuplicate presumably tells the structure-chain helpers whether
    // this struct may appear more than once in a pNext chain - not used in this definition.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDescriptorSetLayoutHostMappingInfoVALVE;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DescriptorSetLayoutHostMappingInfoVALVE( size_t descriptorOffset_ = {}, uint32_t descriptorSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , descriptorOffset{ descriptorOffset_ }
      , descriptorSize{ descriptorSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutHostMappingInfoVALVE( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpreting it as the wrapper type.
    DescriptorSetLayoutHostMappingInfoVALVE( VkDescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorSetLayoutHostMappingInfoVALVE( *reinterpret_cast<DescriptorSetLayoutHostMappingInfoVALVE const *>( &rhs ) )
    {
    }

    DescriptorSetLayoutHostMappingInfoVALVE & operator=( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the same reinterpretation.
    DescriptorSetLayoutHostMappingInfoVALVE & operator=( VkDescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DescriptorSetLayoutHostMappingInfoVALVE const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setDescriptorOffset( size_t descriptorOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorOffset = descriptorOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setDescriptorSize( uint32_t descriptorSize_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorSize = descriptorSize_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (reference and pointer forms).
    operator VkDescriptorSetLayoutHostMappingInfoVALVE const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorSetLayoutHostMappingInfoVALVE *>( this );
    }

    operator VkDescriptorSetLayoutHostMappingInfoVALVE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( this );
    }

    operator VkDescriptorSetLayoutHostMappingInfoVALVE const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDescriptorSetLayoutHostMappingInfoVALVE *>( this );
    }

    operator VkDescriptorSetLayoutHostMappingInfoVALVE *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over every member; used by operator== below.
    std::tuple<StructureType const &, void * const &, size_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, descriptorOffset, descriptorSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorSetLayoutHostMappingInfoVALVE const & ) const = default;
#else
    // Shallow memberwise equality; pNext is compared by address.
    bool operator==( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorOffset == rhs.descriptorOffset ) && ( descriptorSize == rhs.descriptorSize );
#  endif
    }

    bool operator!=( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct; do not reorder.
    StructureType sType            = StructureType::eDescriptorSetLayoutHostMappingInfoVALVE;
    void *        pNext            = {};
    size_t        descriptorOffset = {};
    uint32_t      descriptorSize   = {};
  };

// Trait wiring from the native type / StructureType enumerant back to the wrapper.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDescriptorSetLayoutHostMappingInfoVALVE>
  {
    using Type = DescriptorSetLayoutHostMappingInfoVALVE;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eDescriptorSetLayoutHostMappingInfoVALVE>
  {
    using Type = DescriptorSetLayoutHostMappingInfoVALVE;
  };

  // wrapper struct for struct VkDescriptorSetLayoutSupport, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetLayoutSupport.html
  //
  // Output-only wrapper (no setters are generated); relies on being layout-identical
  // to the C struct - every conversion below is a reinterpret_cast of `this`.
  struct DescriptorSetLayoutSupport
  {
    using NativeType = VkDescriptorSetLayoutSupport;

    // NOTE(review): allowDuplicate presumably tells the structure-chain helpers whether
    // this struct may appear more than once in a pNext chain - not used in this definition.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDescriptorSetLayoutSupport;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( Bool32 supported_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , supported{ supported_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpreting it as the wrapper type.
    DescriptorSetLayoutSupport( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorSetLayoutSupport( *reinterpret_cast<DescriptorSetLayoutSupport const *>( &rhs ) )
    {
    }

    DescriptorSetLayoutSupport & operator=( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the same reinterpretation.
    DescriptorSetLayoutSupport & operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DescriptorSetLayoutSupport const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C struct (reference and pointer forms).
    operator VkDescriptorSetLayoutSupport const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorSetLayoutSupport *>( this );
    }

    operator VkDescriptorSetLayoutSupport &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorSetLayoutSupport *>( this );
    }

    operator VkDescriptorSetLayoutSupport const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDescriptorSetLayoutSupport *>( this );
    }

    operator VkDescriptorSetLayoutSupport *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDescriptorSetLayoutSupport *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over every member; used by operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, supported );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorSetLayoutSupport const & ) const = default;
#else
    // Shallow memberwise equality; pNext is compared by address.
    bool operator==( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supported == rhs.supported );
#  endif
    }

    bool operator!=( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct; do not reorder.
    StructureType sType     = StructureType::eDescriptorSetLayoutSupport;
    void *        pNext     = {};
    Bool32        supported = {};
  };

// Trait wiring from the native type / StructureType enumerant back to the wrapper.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDescriptorSetLayoutSupport>
  {
    using Type = DescriptorSetLayoutSupport;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eDescriptorSetLayoutSupport>
  {
    using Type = DescriptorSetLayoutSupport;
  };

  // Alias for the identical KHR-extension spelling of this structure.
  using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport;

  // wrapper struct for struct VkDescriptorSetVariableDescriptorCountAllocateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetVariableDescriptorCountAllocateInfo.html
  //
  // Relies on being layout-identical to the C struct: every conversion below is a
  // reinterpret_cast of `this`.
  struct DescriptorSetVariableDescriptorCountAllocateInfo
  {
    using NativeType = VkDescriptorSetVariableDescriptorCountAllocateInfo;

    // NOTE(review): allowDuplicate presumably tells the structure-chain helpers whether
    // this struct may appear more than once in a pNext chain - not used in this definition.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( uint32_t         descriptorSetCount_ = {},
                                                                           const uint32_t * pDescriptorCounts_  = {},
                                                                           const void *     pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , descriptorSetCount{ descriptorSetCount_ }
      , pDescriptorCounts{ pDescriptorCounts_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      DescriptorSetVariableDescriptorCountAllocateInfo( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpreting it as the wrapper type.
    DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorSetVariableDescriptorCountAllocateInfo( *reinterpret_cast<DescriptorSetVariableDescriptorCountAllocateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives descriptorSetCount/pDescriptorCounts from one
    // array proxy. The proxy type (per its name) rejects temporaries, guarding against
    // a dangling pDescriptorCounts.
    DescriptorSetVariableDescriptorCountAllocateInfo( ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), descriptorSetCount( static_cast<uint32_t>( descriptorCounts_.size() ) ), pDescriptorCounts( descriptorCounts_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DescriptorSetVariableDescriptorCountAllocateInfo & operator=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the same reinterpretation.
    DescriptorSetVariableDescriptorCountAllocateInfo & operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DescriptorSetVariableDescriptorCountAllocateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorSetCount = descriptorSetCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setPDescriptorCounts( const uint32_t * pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT
    {
      pDescriptorCounts = pDescriptorCounts_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience setter: fills both descriptorSetCount and pDescriptorCounts at once.
    DescriptorSetVariableDescriptorCountAllocateInfo &
      setDescriptorCounts( ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorSetCount = static_cast<uint32_t>( descriptorCounts_.size() );
      pDescriptorCounts  = descriptorCounts_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (reference and pointer forms).
    operator VkDescriptorSetVariableDescriptorCountAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountAllocateInfo *>( this );
    }

    operator VkDescriptorSetVariableDescriptorCountAllocateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountAllocateInfo *>( this );
    }

    operator VkDescriptorSetVariableDescriptorCountAllocateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDescriptorSetVariableDescriptorCountAllocateInfo *>( this );
    }

    operator VkDescriptorSetVariableDescriptorCountAllocateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDescriptorSetVariableDescriptorCountAllocateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over every member; used by operator== below.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, descriptorSetCount, pDescriptorCounts );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorSetVariableDescriptorCountAllocateInfo const & ) const = default;
#else
    // Shallow memberwise equality: pNext and pDescriptorCounts are compared by address.
    bool operator==( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetCount == rhs.descriptorSetCount ) &&
             ( pDescriptorCounts == rhs.pDescriptorCounts );
#  endif
    }

    bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct; do not reorder.
    StructureType    sType              = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;
    const void *     pNext              = {};
    uint32_t         descriptorSetCount = {};
    const uint32_t * pDescriptorCounts  = {};
  };

// Trait wiring from the native type / StructureType enumerant back to the wrapper.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDescriptorSetVariableDescriptorCountAllocateInfo>
  {
    using Type = DescriptorSetVariableDescriptorCountAllocateInfo;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo>
  {
    using Type = DescriptorSetVariableDescriptorCountAllocateInfo;
  };

  // Alias for the identical EXT-extension spelling of this structure.
  using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo;

  // wrapper struct for struct VkDescriptorSetVariableDescriptorCountLayoutSupport, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetVariableDescriptorCountLayoutSupport.html
  //
  // Output-only wrapper (no setters are generated); relies on being layout-identical
  // to the C struct - every conversion below is a reinterpret_cast of `this`.
  struct DescriptorSetVariableDescriptorCountLayoutSupport
  {
    using NativeType = VkDescriptorSetVariableDescriptorCountLayoutSupport;

    // NOTE(review): allowDuplicate presumably tells the structure-chain helpers whether
    // this struct may appear more than once in a pNext chain - not used in this definition.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport( uint32_t maxVariableDescriptorCount_ = {},
                                                                            void *   pNext_                      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxVariableDescriptorCount{ maxVariableDescriptorCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      DescriptorSetVariableDescriptorCountLayoutSupport( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpreting it as the wrapper type.
    DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorSetVariableDescriptorCountLayoutSupport( *reinterpret_cast<DescriptorSetVariableDescriptorCountLayoutSupport const *>( &rhs ) )
    {
    }

    DescriptorSetVariableDescriptorCountLayoutSupport &
      operator=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the same reinterpretation.
    DescriptorSetVariableDescriptorCountLayoutSupport & operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DescriptorSetVariableDescriptorCountLayoutSupport const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C struct (reference and pointer forms).
    operator VkDescriptorSetVariableDescriptorCountLayoutSupport const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountLayoutSupport *>( this );
    }

    operator VkDescriptorSetVariableDescriptorCountLayoutSupport &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupport *>( this );
    }

    operator VkDescriptorSetVariableDescriptorCountLayoutSupport const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDescriptorSetVariableDescriptorCountLayoutSupport *>( this );
    }

    operator VkDescriptorSetVariableDescriptorCountLayoutSupport *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupport *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over every member; used by operator== below.
    std::tuple<StructureType const &, void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxVariableDescriptorCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorSetVariableDescriptorCountLayoutSupport const & ) const = default;
#else
    // Shallow memberwise equality; pNext is compared by address.
    bool operator==( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount );
#  endif
    }

    bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct; do not reorder.
    StructureType sType                      = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;
    void *        pNext                      = {};
    uint32_t      maxVariableDescriptorCount = {};
  };

// Trait wiring from the native type / StructureType enumerant back to the wrapper.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDescriptorSetVariableDescriptorCountLayoutSupport>
  {
    using Type = DescriptorSetVariableDescriptorCountLayoutSupport;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport>
  {
    using Type = DescriptorSetVariableDescriptorCountLayoutSupport;
  };

  // Alias for the identical EXT-extension spelling of this structure.
  using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport;

  // wrapper struct for struct VkDescriptorUpdateTemplateEntry, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorUpdateTemplateEntry.html
  //
  // Plain (non-sType) wrapper; relies on being layout-identical to the C struct -
  // every conversion below is a reinterpret_cast of `this`.
  struct DescriptorUpdateTemplateEntry
  {
    using NativeType = VkDescriptorUpdateTemplateEntry;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( uint32_t       dstBinding_      = {},
                                                        uint32_t       dstArrayElement_ = {},
                                                        uint32_t       descriptorCount_ = {},
                                                        DescriptorType descriptorType_  = DescriptorType::eSampler,
                                                        size_t         offset_          = {},
                                                        size_t         stride_          = {} ) VULKAN_HPP_NOEXCEPT
      : dstBinding{ dstBinding_ }
      , dstArrayElement{ dstArrayElement_ }
      , descriptorCount{ descriptorCount_ }
      , descriptorType{ descriptorType_ }
      , offset{ offset_ }
      , stride{ stride_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpreting it as the wrapper type.
    DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorUpdateTemplateEntry( *reinterpret_cast<DescriptorUpdateTemplateEntry const *>( &rhs ) )
    {
    }

    DescriptorUpdateTemplateEntry & operator=( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the same reinterpretation.
    DescriptorUpdateTemplateEntry & operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DescriptorUpdateTemplateEntry const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
    {
      dstBinding = dstBinding_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
    {
      dstArrayElement = dstArrayElement_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorCount = descriptorCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDescriptorType( DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorType = descriptorType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setOffset( size_t offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setStride( size_t stride_ ) VULKAN_HPP_NOEXCEPT
    {
      stride = stride_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (reference and pointer forms).
    operator VkDescriptorUpdateTemplateEntry const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorUpdateTemplateEntry *>( this );
    }

    operator VkDescriptorUpdateTemplateEntry &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorUpdateTemplateEntry *>( this );
    }

    operator VkDescriptorUpdateTemplateEntry const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDescriptorUpdateTemplateEntry *>( this );
    }

    operator VkDescriptorUpdateTemplateEntry *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDescriptorUpdateTemplateEntry *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over every member; used by operator== below.
    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, DescriptorType const &, size_t const &, size_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( dstBinding, dstArrayElement, descriptorCount, descriptorType, offset, stride );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorUpdateTemplateEntry const & ) const = default;
#else
    // Memberwise equality over all six members.
    bool operator==( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( dstBinding == rhs.dstBinding ) && ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount ) &&
             ( descriptorType == rhs.descriptorType ) && ( offset == rhs.offset ) && ( stride == rhs.stride );
#  endif
    }

    bool operator!=( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct; do not reorder.
    uint32_t       dstBinding      = {};
    uint32_t       dstArrayElement = {};
    uint32_t       descriptorCount = {};
    DescriptorType descriptorType  = DescriptorType::eSampler;
    size_t         offset          = {};
    size_t         stride          = {};
  };

// Trait wiring from the native type back to the wrapper.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDescriptorUpdateTemplateEntry>
  {
    using Type = DescriptorUpdateTemplateEntry;
  };
#endif
  // Alias for the identical KHR-extension spelling of this structure.
  using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry;

  // wrapper struct for struct VkDescriptorUpdateTemplateCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorUpdateTemplateCreateInfo.html
  struct DescriptorUpdateTemplateCreateInfo
  {
    using NativeType = VkDescriptorUpdateTemplateCreateInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDescriptorUpdateTemplateCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-by-member constructor; every parameter has a default so this also serves
    // as the default constructor. pNext is last, matching the generator's convention.
    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateFlags   flags_                      = {},
                                                             uint32_t                              descriptorUpdateEntryCount_ = {},
                                                             const DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_   = {},
                                                             DescriptorUpdateTemplateType          templateType_ = DescriptorUpdateTemplateType::eDescriptorSet,
                                                             DescriptorSetLayout                   descriptorSetLayout_ = {},
                                                             PipelineBindPoint                     pipelineBindPoint_   = PipelineBindPoint::eGraphics,
                                                             PipelineLayout                        pipelineLayout_      = {},
                                                             uint32_t                              set_                 = {},
                                                             const void *                          pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , descriptorUpdateEntryCount{ descriptorUpdateEntryCount_ }
      , pDescriptorUpdateEntries{ pDescriptorUpdateEntries_ }
      , templateType{ templateType_ }
      , descriptorSetLayout{ descriptorSetLayout_ }
      , pipelineBindPoint{ pipelineBindPoint_ }
      , pipelineLayout{ pipelineLayout_ }
      , set{ set_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpreting it as the wrapper type;
    // relies on the wrapper being layout-identical to VkDescriptorUpdateTemplateCreateInfo.
    DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorUpdateTemplateCreateInfo( *reinterpret_cast<DescriptorUpdateTemplateCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives descriptorUpdateEntryCount/pDescriptorUpdateEntries
    // from one array proxy. The proxy type (per its name) rejects temporaries, guarding
    // against a dangling pDescriptorUpdateEntries.
    DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateFlags                                  flags_,
                                        ArrayProxyNoTemporaries<const DescriptorUpdateTemplateEntry> const & descriptorUpdateEntries_,
                                        DescriptorUpdateTemplateType templateType_        = DescriptorUpdateTemplateType::eDescriptorSet,
                                        DescriptorSetLayout          descriptorSetLayout_ = {},
                                        PipelineBindPoint            pipelineBindPoint_   = PipelineBindPoint::eGraphics,
                                        PipelineLayout               pipelineLayout_      = {},
                                        uint32_t                     set_                 = {},
                                        const void *                 pNext_               = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , descriptorUpdateEntryCount( static_cast<uint32_t>( descriptorUpdateEntries_.size() ) )
      , pDescriptorUpdateEntries( descriptorUpdateEntries_.data() )
      , templateType( templateType_ )
      , descriptorSetLayout( descriptorSetLayout_ )
      , pipelineBindPoint( pipelineBindPoint_ )
      , pipelineLayout( pipelineLayout_ )
      , set( set_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DescriptorUpdateTemplateCreateInfo & operator=( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via reinterpretation (layout-identical wrapper).
    DescriptorUpdateTemplateCreateInfo & operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DescriptorUpdateTemplateCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setFlags( DescriptorUpdateTemplateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorUpdateEntryCount = descriptorUpdateEntryCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo &
      setPDescriptorUpdateEntries( const DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      pDescriptorUpdateEntries = pDescriptorUpdateEntries_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience setter: fills both the count and the pointer from one array proxy.
    DescriptorUpdateTemplateCreateInfo &
      setDescriptorUpdateEntries( ArrayProxyNoTemporaries<const DescriptorUpdateTemplateEntry> const & descriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorUpdateEntryCount = static_cast<uint32_t>( descriptorUpdateEntries_.size() );
      pDescriptorUpdateEntries   = descriptorUpdateEntries_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setTemplateType( DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT
    {
      templateType = templateType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorSetLayout = descriptorSetLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineBindPoint = pipelineBindPoint_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPipelineLayout( PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineLayout = pipelineLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT
    {
      set = set_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the native C type: reinterpret this object in place (no copy).
    // Valid because the wrapper is generated to be layout-identical to the C struct.
    operator VkDescriptorUpdateTemplateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( this );
    }

    operator VkDescriptorUpdateTemplateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorUpdateTemplateCreateInfo *>( this );
    }

    operator VkDescriptorUpdateTemplateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( this );
    }

    operator VkDescriptorUpdateTemplateCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDescriptorUpdateTemplateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members in declaration order;
    // used below to implement equality via tuple comparison.
    std::tuple<StructureType const &,
               const void * const &,
               DescriptorUpdateTemplateCreateFlags const &,
               uint32_t const &,
               const DescriptorUpdateTemplateEntry * const &,
               DescriptorUpdateTemplateType const &,
               DescriptorSetLayout const &,
               PipelineBindPoint const &,
               PipelineLayout const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(
        sType, pNext, flags, descriptorUpdateEntryCount, pDescriptorUpdateEntries, templateType, descriptorSetLayout, pipelineBindPoint, pipelineLayout, set );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorUpdateTemplateCreateInfo const & ) const = default;
#else
    // Memberwise equality. Note: pNext and pDescriptorUpdateEntries are compared by
    // pointer value only, not by the contents they point to.
    bool operator==( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount ) &&
             ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries ) && ( templateType == rhs.templateType ) &&
             ( descriptorSetLayout == rhs.descriptorSetLayout ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipelineLayout == rhs.pipelineLayout ) &&
             ( set == rhs.set );
#  endif
    }

    bool operator!=( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                         sType                      = StructureType::eDescriptorUpdateTemplateCreateInfo;
    const void *                          pNext                      = {};
    DescriptorUpdateTemplateCreateFlags   flags                      = {};
    uint32_t                              descriptorUpdateEntryCount = {};
    const DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries   = {};
    DescriptorUpdateTemplateType          templateType               = DescriptorUpdateTemplateType::eDescriptorSet;
    DescriptorSetLayout                   descriptorSetLayout        = {};
    PipelineBindPoint                     pipelineBindPoint          = PipelineBindPoint::eGraphics;
    PipelineLayout                        pipelineLayout             = {};
    uint32_t                              set                        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper (C++20 only).
  template <>
  struct CppType<VkDescriptorUpdateTemplateCreateInfo>
  {
    using Type = DescriptorUpdateTemplateCreateInfo;
  };
#endif

  // Maps the sType enumerant to its C++ wrapper struct (used by structure-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::eDescriptorUpdateTemplateCreateInfo>
  {
    using Type = DescriptorUpdateTemplateCreateInfo;
  };

  // KHR-suffixed alias kept for backward compatibility with the extension-era name.
  using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo;

  // wrapper struct for struct VkDeviceAddressBindingCallbackDataEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceAddressBindingCallbackDataEXT.html
  struct DeviceAddressBindingCallbackDataEXT
  {
    using NativeType = VkDeviceAddressBindingCallbackDataEXT;

    // allowDuplicate: this struct may appear at most once in a pNext chain.
    // structureType: the sType value the constructor-initialized sType member carries.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceAddressBindingCallbackDataEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all parameters are defaulted so any prefix may be supplied.
    VULKAN_HPP_CONSTEXPR DeviceAddressBindingCallbackDataEXT( DeviceAddressBindingFlagsEXT flags_       = {},
                                                              DeviceAddress                baseAddress_ = {},
                                                              DeviceSize                   size_        = {},
                                                              DeviceAddressBindingTypeEXT  bindingType_ = DeviceAddressBindingTypeEXT::eBind,
                                                              void *                       pNext_       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , baseAddress{ baseAddress_ }
      , size{ size_ }
      , bindingType{ bindingType_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceAddressBindingCallbackDataEXT( DeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpretation; relies on the wrapper
    // being layout-identical to VkDeviceAddressBindingCallbackDataEXT.
    DeviceAddressBindingCallbackDataEXT( VkDeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceAddressBindingCallbackDataEXT( *reinterpret_cast<DeviceAddressBindingCallbackDataEXT const *>( &rhs ) )
    {
    }

    DeviceAddressBindingCallbackDataEXT & operator=( DeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (available even when constructors are disabled).
    DeviceAddressBindingCallbackDataEXT & operator=( VkDeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceAddressBindingCallbackDataEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setFlags( DeviceAddressBindingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setBaseAddress( DeviceAddress baseAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      baseAddress = baseAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setSize( DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setBindingType( DeviceAddressBindingTypeEXT bindingType_ ) VULKAN_HPP_NOEXCEPT
    {
      bindingType = bindingType_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the native C type: reinterpret this object in place (no copy).
    operator VkDeviceAddressBindingCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceAddressBindingCallbackDataEXT *>( this );
    }

    operator VkDeviceAddressBindingCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceAddressBindingCallbackDataEXT *>( this );
    }

    operator VkDeviceAddressBindingCallbackDataEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceAddressBindingCallbackDataEXT *>( this );
    }

    operator VkDeviceAddressBindingCallbackDataEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceAddressBindingCallbackDataEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members in declaration order; backs operator==.
    std::tuple<StructureType const &,
               void * const &,
               DeviceAddressBindingFlagsEXT const &,
               DeviceAddress const &,
               DeviceSize const &,
               DeviceAddressBindingTypeEXT const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, baseAddress, size, bindingType );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceAddressBindingCallbackDataEXT const & ) const = default;
#else
    // Memberwise equality; pNext is compared by pointer value only.
    bool operator==( DeviceAddressBindingCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( baseAddress == rhs.baseAddress ) && ( size == rhs.size ) &&
             ( bindingType == rhs.bindingType );
#  endif
    }

    bool operator!=( DeviceAddressBindingCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkDeviceAddressBindingCallbackDataEXT, in the same order.
    StructureType                sType       = StructureType::eDeviceAddressBindingCallbackDataEXT;
    void *                       pNext       = {};
    DeviceAddressBindingFlagsEXT flags       = {};
    DeviceAddress                baseAddress = {};
    DeviceSize                   size        = {};
    DeviceAddressBindingTypeEXT  bindingType = DeviceAddressBindingTypeEXT::eBind;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper (C++20 only).
  template <>
  struct CppType<VkDeviceAddressBindingCallbackDataEXT>
  {
    using Type = DeviceAddressBindingCallbackDataEXT;
  };
#endif

  // Maps the sType enumerant to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eDeviceAddressBindingCallbackDataEXT>
  {
    using Type = DeviceAddressBindingCallbackDataEXT;
  };

  // wrapper struct for struct VkDeviceBufferMemoryRequirements, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceBufferMemoryRequirements.html
  struct DeviceBufferMemoryRequirements
  {
    using NativeType = VkDeviceBufferMemoryRequirements;

    // allowDuplicate: this struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceBufferMemoryRequirements;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; stores the pointer only, so *pCreateInfo_ must outlive this struct.
    VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements( const BufferCreateInfo * pCreateInfo_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pCreateInfo{ pCreateInfo_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpretation; relies on identical layout.
    DeviceBufferMemoryRequirements( VkDeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceBufferMemoryRequirements( *reinterpret_cast<DeviceBufferMemoryRequirements const *>( &rhs ) )
    {
    }

    DeviceBufferMemoryRequirements & operator=( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (available even when constructors are disabled).
    DeviceBufferMemoryRequirements & operator=( VkDeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceBufferMemoryRequirements const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements & setPCreateInfo( const BufferCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pCreateInfo = pCreateInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the native C type: reinterpret this object in place (no copy).
    operator VkDeviceBufferMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( this );
    }

    operator VkDeviceBufferMemoryRequirements &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceBufferMemoryRequirements *>( this );
    }

    operator VkDeviceBufferMemoryRequirements const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( this );
    }

    operator VkDeviceBufferMemoryRequirements *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceBufferMemoryRequirements *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members in declaration order; backs operator==.
    std::tuple<StructureType const &, const void * const &, const BufferCreateInfo * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pCreateInfo );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceBufferMemoryRequirements const & ) const = default;
#else
    // Memberwise equality; pNext and pCreateInfo are compared by pointer value only.
    bool operator==( DeviceBufferMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo );
#  endif
    }

    bool operator!=( DeviceBufferMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkDeviceBufferMemoryRequirements, in the same order.
    StructureType            sType       = StructureType::eDeviceBufferMemoryRequirements;
    const void *             pNext       = {};
    const BufferCreateInfo * pCreateInfo = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper (C++20 only).
  template <>
  struct CppType<VkDeviceBufferMemoryRequirements>
  {
    using Type = DeviceBufferMemoryRequirements;
  };
#endif

  // Maps the sType enumerant to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eDeviceBufferMemoryRequirements>
  {
    using Type = DeviceBufferMemoryRequirements;
  };

  // KHR-suffixed alias kept for backward compatibility with the extension-era name.
  using DeviceBufferMemoryRequirementsKHR = DeviceBufferMemoryRequirements;

  // wrapper struct for struct VkDeviceQueueCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceQueueCreateInfo.html
  struct DeviceQueueCreateInfo
  {
    using NativeType = VkDeviceQueueCreateInfo;

    // allowDuplicate: this struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceQueueCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; pQueuePriorities_ is stored as a pointer only, so the
    // pointed-to array must outlive this struct.
    VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( DeviceQueueCreateFlags flags_            = {},
                                                uint32_t               queueFamilyIndex_ = {},
                                                uint32_t               queueCount_       = {},
                                                const float *          pQueuePriorities_ = {},
                                                const void *           pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , queueFamilyIndex{ queueFamilyIndex_ }
      , queueCount{ queueCount_ }
      , pQueuePriorities{ pQueuePriorities_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpretation; relies on identical layout.
    DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceQueueCreateInfo( *reinterpret_cast<DeviceQueueCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives queueCount from the priorities proxy, so
    // count and pointer can never get out of sync. The proxied array is not copied.
    DeviceQueueCreateInfo( DeviceQueueCreateFlags                       flags_,
                           uint32_t                                     queueFamilyIndex_,
                           ArrayProxyNoTemporaries<const float> const & queuePriorities_,
                           const void *                                 pNext_ = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , queueFamilyIndex( queueFamilyIndex_ )
      , queueCount( static_cast<uint32_t>( queuePriorities_.size() ) )
      , pQueuePriorities( queuePriorities_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DeviceQueueCreateInfo & operator=( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (available even when constructors are disabled).
    DeviceQueueCreateInfo & operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceQueueCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setFlags( DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndex = queueFamilyIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setQueueCount( uint32_t queueCount_ ) VULKAN_HPP_NOEXCEPT
    {
      queueCount = queueCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setPQueuePriorities( const float * pQueuePriorities_ ) VULKAN_HPP_NOEXCEPT
    {
      pQueuePriorities = pQueuePriorities_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience setter: fills both queueCount and pQueuePriorities from one proxy.
    DeviceQueueCreateInfo & setQueuePriorities( ArrayProxyNoTemporaries<const float> const & queuePriorities_ ) VULKAN_HPP_NOEXCEPT
    {
      queueCount       = static_cast<uint32_t>( queuePriorities_.size() );
      pQueuePriorities = queuePriorities_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the native C type: reinterpret this object in place (no copy).
    operator VkDeviceQueueCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceQueueCreateInfo *>( this );
    }

    operator VkDeviceQueueCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceQueueCreateInfo *>( this );
    }

    operator VkDeviceQueueCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceQueueCreateInfo *>( this );
    }

    operator VkDeviceQueueCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceQueueCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members in declaration order; backs operator==.
    std::tuple<StructureType const &, const void * const &, DeviceQueueCreateFlags const &, uint32_t const &, uint32_t const &, const float * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, queueFamilyIndex, queueCount, pQueuePriorities );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceQueueCreateInfo const & ) const = default;
#else
    // Memberwise equality; pNext and pQueuePriorities are compared by pointer value only.
    bool operator==( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) &&
             ( queueCount == rhs.queueCount ) && ( pQueuePriorities == rhs.pQueuePriorities );
#  endif
    }

    bool operator!=( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkDeviceQueueCreateInfo, in the same order.
    StructureType          sType            = StructureType::eDeviceQueueCreateInfo;
    const void *           pNext            = {};
    DeviceQueueCreateFlags flags            = {};
    uint32_t               queueFamilyIndex = {};
    uint32_t               queueCount       = {};
    const float *          pQueuePriorities = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper (C++20 only).
  template <>
  struct CppType<VkDeviceQueueCreateInfo>
  {
    using Type = DeviceQueueCreateInfo;
  };
#endif

  // Maps the sType enumerant to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eDeviceQueueCreateInfo>
  {
    using Type = DeviceQueueCreateInfo;
  };

  // wrapper struct for struct VkPhysicalDeviceFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFeatures.html
  struct PhysicalDeviceFeatures
  {
    using NativeType = VkPhysicalDeviceFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: one Bool32 per feature flag, all defaulted to VK_FALSE ({}),
    // initialized in the same order as the members (which mirror VkPhysicalDeviceFeatures).
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( Bool32 robustBufferAccess_                      = {},
                                                 Bool32 fullDrawIndexUint32_                     = {},
                                                 Bool32 imageCubeArray_                          = {},
                                                 Bool32 independentBlend_                        = {},
                                                 Bool32 geometryShader_                          = {},
                                                 Bool32 tessellationShader_                      = {},
                                                 Bool32 sampleRateShading_                       = {},
                                                 Bool32 dualSrcBlend_                            = {},
                                                 Bool32 logicOp_                                 = {},
                                                 Bool32 multiDrawIndirect_                       = {},
                                                 Bool32 drawIndirectFirstInstance_               = {},
                                                 Bool32 depthClamp_                              = {},
                                                 Bool32 depthBiasClamp_                          = {},
                                                 Bool32 fillModeNonSolid_                        = {},
                                                 Bool32 depthBounds_                             = {},
                                                 Bool32 wideLines_                               = {},
                                                 Bool32 largePoints_                             = {},
                                                 Bool32 alphaToOne_                              = {},
                                                 Bool32 multiViewport_                           = {},
                                                 Bool32 samplerAnisotropy_                       = {},
                                                 Bool32 textureCompressionETC2_                  = {},
                                                 Bool32 textureCompressionASTC_LDR_              = {},
                                                 Bool32 textureCompressionBC_                    = {},
                                                 Bool32 occlusionQueryPrecise_                   = {},
                                                 Bool32 pipelineStatisticsQuery_                 = {},
                                                 Bool32 vertexPipelineStoresAndAtomics_          = {},
                                                 Bool32 fragmentStoresAndAtomics_                = {},
                                                 Bool32 shaderTessellationAndGeometryPointSize_  = {},
                                                 Bool32 shaderImageGatherExtended_               = {},
                                                 Bool32 shaderStorageImageExtendedFormats_       = {},
                                                 Bool32 shaderStorageImageMultisample_           = {},
                                                 Bool32 shaderStorageImageReadWithoutFormat_     = {},
                                                 Bool32 shaderStorageImageWriteWithoutFormat_    = {},
                                                 Bool32 shaderUniformBufferArrayDynamicIndexing_ = {},
                                                 Bool32 shaderSampledImageArrayDynamicIndexing_  = {},
                                                 Bool32 shaderStorageBufferArrayDynamicIndexing_ = {},
                                                 Bool32 shaderStorageImageArrayDynamicIndexing_  = {},
                                                 Bool32 shaderClipDistance_                      = {},
                                                 Bool32 shaderCullDistance_                      = {},
                                                 Bool32 shaderFloat64_                           = {},
                                                 Bool32 shaderInt64_                             = {},
                                                 Bool32 shaderInt16_                             = {},
                                                 Bool32 shaderResourceResidency_                 = {},
                                                 Bool32 shaderResourceMinLod_                    = {},
                                                 Bool32 sparseBinding_                           = {},
                                                 Bool32 sparseResidencyBuffer_                   = {},
                                                 Bool32 sparseResidencyImage2D_                  = {},
                                                 Bool32 sparseResidencyImage3D_                  = {},
                                                 Bool32 sparseResidency2Samples_                 = {},
                                                 Bool32 sparseResidency4Samples_                 = {},
                                                 Bool32 sparseResidency8Samples_                 = {},
                                                 Bool32 sparseResidency16Samples_                = {},
                                                 Bool32 sparseResidencyAliased_                  = {},
                                                 Bool32 variableMultisampleRate_                 = {},
                                                 Bool32 inheritedQueries_                        = {} ) VULKAN_HPP_NOEXCEPT
      : robustBufferAccess{ robustBufferAccess_ }
      , fullDrawIndexUint32{ fullDrawIndexUint32_ }
      , imageCubeArray{ imageCubeArray_ }
      , independentBlend{ independentBlend_ }
      , geometryShader{ geometryShader_ }
      , tessellationShader{ tessellationShader_ }
      , sampleRateShading{ sampleRateShading_ }
      , dualSrcBlend{ dualSrcBlend_ }
      , logicOp{ logicOp_ }
      , multiDrawIndirect{ multiDrawIndirect_ }
      , drawIndirectFirstInstance{ drawIndirectFirstInstance_ }
      , depthClamp{ depthClamp_ }
      , depthBiasClamp{ depthBiasClamp_ }
      , fillModeNonSolid{ fillModeNonSolid_ }
      , depthBounds{ depthBounds_ }
      , wideLines{ wideLines_ }
      , largePoints{ largePoints_ }
      , alphaToOne{ alphaToOne_ }
      , multiViewport{ multiViewport_ }
      , samplerAnisotropy{ samplerAnisotropy_ }
      , textureCompressionETC2{ textureCompressionETC2_ }
      , textureCompressionASTC_LDR{ textureCompressionASTC_LDR_ }
      , textureCompressionBC{ textureCompressionBC_ }
      , occlusionQueryPrecise{ occlusionQueryPrecise_ }
      , pipelineStatisticsQuery{ pipelineStatisticsQuery_ }
      , vertexPipelineStoresAndAtomics{ vertexPipelineStoresAndAtomics_ }
      , fragmentStoresAndAtomics{ fragmentStoresAndAtomics_ }
      , shaderTessellationAndGeometryPointSize{ shaderTessellationAndGeometryPointSize_ }
      , shaderImageGatherExtended{ shaderImageGatherExtended_ }
      , shaderStorageImageExtendedFormats{ shaderStorageImageExtendedFormats_ }
      , shaderStorageImageMultisample{ shaderStorageImageMultisample_ }
      , shaderStorageImageReadWithoutFormat{ shaderStorageImageReadWithoutFormat_ }
      , shaderStorageImageWriteWithoutFormat{ shaderStorageImageWriteWithoutFormat_ }
      , shaderUniformBufferArrayDynamicIndexing{ shaderUniformBufferArrayDynamicIndexing_ }
      , shaderSampledImageArrayDynamicIndexing{ shaderSampledImageArrayDynamicIndexing_ }
      , shaderStorageBufferArrayDynamicIndexing{ shaderStorageBufferArrayDynamicIndexing_ }
      , shaderStorageImageArrayDynamicIndexing{ shaderStorageImageArrayDynamicIndexing_ }
      , shaderClipDistance{ shaderClipDistance_ }
      , shaderCullDistance{ shaderCullDistance_ }
      , shaderFloat64{ shaderFloat64_ }
      , shaderInt64{ shaderInt64_ }
      , shaderInt16{ shaderInt16_ }
      , shaderResourceResidency{ shaderResourceResidency_ }
      , shaderResourceMinLod{ shaderResourceMinLod_ }
      , sparseBinding{ sparseBinding_ }
      , sparseResidencyBuffer{ sparseResidencyBuffer_ }
      , sparseResidencyImage2D{ sparseResidencyImage2D_ }
      , sparseResidencyImage3D{ sparseResidencyImage3D_ }
      , sparseResidency2Samples{ sparseResidency2Samples_ }
      , sparseResidency4Samples{ sparseResidency4Samples_ }
      , sparseResidency8Samples{ sparseResidency8Samples_ }
      , sparseResidency16Samples{ sparseResidency16Samples_ }
      , sparseResidencyAliased{ sparseResidencyAliased_ }
      , variableMultisampleRate{ variableMultisampleRate_ }
      , inheritedQueries{ inheritedQueries_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpretation; relies on the wrapper
    // being layout-identical to VkPhysicalDeviceFeatures.
    PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFeatures( *reinterpret_cast<PhysicalDeviceFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceFeatures & operator=( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (available even when constructors are disabled).
    PhysicalDeviceFeatures & operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each one assigns the corresponding feature flag and returns *this
    // so calls can be chained (builder style). One setter per member, in member order.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setRobustBufferAccess( Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      robustBufferAccess = robustBufferAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFullDrawIndexUint32( Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT
    {
      fullDrawIndexUint32 = fullDrawIndexUint32_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setImageCubeArray( Bool32 imageCubeArray_ ) VULKAN_HPP_NOEXCEPT
    {
      imageCubeArray = imageCubeArray_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setIndependentBlend( Bool32 independentBlend_ ) VULKAN_HPP_NOEXCEPT
    {
      independentBlend = independentBlend_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setGeometryShader( Bool32 geometryShader_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryShader = geometryShader_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTessellationShader( Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT
    {
      tessellationShader = tessellationShader_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSampleRateShading( Bool32 sampleRateShading_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleRateShading = sampleRateShading_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDualSrcBlend( Bool32 dualSrcBlend_ ) VULKAN_HPP_NOEXCEPT
    {
      dualSrcBlend = dualSrcBlend_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setLogicOp( Bool32 logicOp_ ) VULKAN_HPP_NOEXCEPT
    {
      logicOp = logicOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setMultiDrawIndirect( Bool32 multiDrawIndirect_ ) VULKAN_HPP_NOEXCEPT
    {
      multiDrawIndirect = multiDrawIndirect_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDrawIndirectFirstInstance( Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT
    {
      drawIndirectFirstInstance = drawIndirectFirstInstance_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthClamp( Bool32 depthClamp_ ) VULKAN_HPP_NOEXCEPT
    {
      depthClamp = depthClamp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthBiasClamp( Bool32 depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBiasClamp = depthBiasClamp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFillModeNonSolid( Bool32 fillModeNonSolid_ ) VULKAN_HPP_NOEXCEPT
    {
      fillModeNonSolid = fillModeNonSolid_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthBounds( Bool32 depthBounds_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBounds = depthBounds_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setWideLines( Bool32 wideLines_ ) VULKAN_HPP_NOEXCEPT
    {
      wideLines = wideLines_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setLargePoints( Bool32 largePoints_ ) VULKAN_HPP_NOEXCEPT
    {
      largePoints = largePoints_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setAlphaToOne( Bool32 alphaToOne_ ) VULKAN_HPP_NOEXCEPT
    {
      alphaToOne = alphaToOne_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setMultiViewport( Bool32 multiViewport_ ) VULKAN_HPP_NOEXCEPT
    {
      multiViewport = multiViewport_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSamplerAnisotropy( Bool32 samplerAnisotropy_ ) VULKAN_HPP_NOEXCEPT
    {
      samplerAnisotropy = samplerAnisotropy_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionETC2( Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT
    {
      textureCompressionETC2 = textureCompressionETC2_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionASTC_LDR( Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT
    {
      textureCompressionASTC_LDR = textureCompressionASTC_LDR_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionBC( Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT
    {
      textureCompressionBC = textureCompressionBC_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setOcclusionQueryPrecise( Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT
    {
      occlusionQueryPrecise = occlusionQueryPrecise_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setPipelineStatisticsQuery( Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineStatisticsQuery = pipelineStatisticsQuery_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setVertexPipelineStoresAndAtomics( Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFragmentStoresAndAtomics( Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentStoresAndAtomics = fragmentStoresAndAtomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
      setShaderTessellationAndGeometryPointSize( Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderImageGatherExtended( Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderImageGatherExtended = shaderImageGatherExtended_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageExtendedFormats( Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageMultisample( Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageImageMultisample = shaderStorageImageMultisample_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageReadWithoutFormat( Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageWriteWithoutFormat( Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
      setShaderUniformBufferArrayDynamicIndexing( Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
      setShaderSampledImageArrayDynamicIndexing( Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
      setShaderStorageBufferArrayDynamicIndexing( Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
      setShaderStorageImageArrayDynamicIndexing( Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderClipDistance( Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderClipDistance = shaderClipDistance_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderCullDistance( Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderCullDistance = shaderCullDistance_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderFloat64( Bool32 shaderFloat64_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderFloat64 = shaderFloat64_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderInt64( Bool32 shaderInt64_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderInt64 = shaderInt64_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderInt16( Bool32 shaderInt16_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderInt16 = shaderInt16_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderResourceResidency( Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderResourceResidency = shaderResourceResidency_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderResourceMinLod( Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderResourceMinLod = shaderResourceMinLod_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseBinding( Bool32 sparseBinding_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseBinding = sparseBinding_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyBuffer( Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseResidencyBuffer = sparseResidencyBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyImage2D( Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseResidencyImage2D = sparseResidencyImage2D_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyImage3D( Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseResidencyImage3D = sparseResidencyImage3D_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency2Samples( Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseResidency2Samples = sparseResidency2Samples_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency4Samples( Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseResidency4Samples = sparseResidency4Samples_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency8Samples( Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseResidency8Samples = sparseResidency8Samples_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency16Samples( Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseResidency16Samples = sparseResidency16Samples_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyAliased( Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseResidencyAliased = sparseResidencyAliased_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setVariableMultisampleRate( Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT
    {
      variableMultisampleRate = variableMultisampleRate_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setInheritedQueries( Bool32 inheritedQueries_ ) VULKAN_HPP_NOEXCEPT
    {
      inheritedQueries = inheritedQueries_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C API type. The wrapper mirrors VkPhysicalDeviceFeatures
    // member-for-member (see the reinterpret_cast-based constructors above), so these are
    // zero-cost reinterpretations of *this.
    operator VkPhysicalDeviceFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFeatures *>( this );
    }

    operator VkPhysicalDeviceFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFeatures *>( this );
    }

    operator VkPhysicalDeviceFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceFeatures *>( this );
    }

    operator VkPhysicalDeviceFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all 55 members as a tuple of const references, in declaration order;
    // used by operator== below when reflection is enabled.
    std::tuple<Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( robustBufferAccess,
                       fullDrawIndexUint32,
                       imageCubeArray,
                       independentBlend,
                       geometryShader,
                       tessellationShader,
                       sampleRateShading,
                       dualSrcBlend,
                       logicOp,
                       multiDrawIndirect,
                       drawIndirectFirstInstance,
                       depthClamp,
                       depthBiasClamp,
                       fillModeNonSolid,
                       depthBounds,
                       wideLines,
                       largePoints,
                       alphaToOne,
                       multiViewport,
                       samplerAnisotropy,
                       textureCompressionETC2,
                       textureCompressionASTC_LDR,
                       textureCompressionBC,
                       occlusionQueryPrecise,
                       pipelineStatisticsQuery,
                       vertexPipelineStoresAndAtomics,
                       fragmentStoresAndAtomics,
                       shaderTessellationAndGeometryPointSize,
                       shaderImageGatherExtended,
                       shaderStorageImageExtendedFormats,
                       shaderStorageImageMultisample,
                       shaderStorageImageReadWithoutFormat,
                       shaderStorageImageWriteWithoutFormat,
                       shaderUniformBufferArrayDynamicIndexing,
                       shaderSampledImageArrayDynamicIndexing,
                       shaderStorageBufferArrayDynamicIndexing,
                       shaderStorageImageArrayDynamicIndexing,
                       shaderClipDistance,
                       shaderCullDistance,
                       shaderFloat64,
                       shaderInt64,
                       shaderInt16,
                       shaderResourceResidency,
                       shaderResourceMinLod,
                       sparseBinding,
                       sparseResidencyBuffer,
                       sparseResidencyImage2D,
                       sparseResidencyImage3D,
                       sparseResidency2Samples,
                       sparseResidency4Samples,
                       sparseResidency8Samples,
                       sparseResidency16Samples,
                       sparseResidencyAliased,
                       variableMultisampleRate,
                       inheritedQueries );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison gives ==, !=, <, <=, >, >= member-wise.
    auto operator<=>( PhysicalDeviceFeatures const & ) const = default;
#else
    // Pre-C++20 fallback: exhaustive member-wise equality over all 55 Bool32 members
    // (via reflect() when reflection is enabled).
    bool operator==( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( robustBufferAccess == rhs.robustBufferAccess ) && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 ) &&
             ( imageCubeArray == rhs.imageCubeArray ) && ( independentBlend == rhs.independentBlend ) && ( geometryShader == rhs.geometryShader ) &&
             ( tessellationShader == rhs.tessellationShader ) && ( sampleRateShading == rhs.sampleRateShading ) && ( dualSrcBlend == rhs.dualSrcBlend ) &&
             ( logicOp == rhs.logicOp ) && ( multiDrawIndirect == rhs.multiDrawIndirect ) && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance ) &&
             ( depthClamp == rhs.depthClamp ) && ( depthBiasClamp == rhs.depthBiasClamp ) && ( fillModeNonSolid == rhs.fillModeNonSolid ) &&
             ( depthBounds == rhs.depthBounds ) && ( wideLines == rhs.wideLines ) && ( largePoints == rhs.largePoints ) && ( alphaToOne == rhs.alphaToOne ) &&
             ( multiViewport == rhs.multiViewport ) && ( samplerAnisotropy == rhs.samplerAnisotropy ) &&
             ( textureCompressionETC2 == rhs.textureCompressionETC2 ) && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR ) &&
             ( textureCompressionBC == rhs.textureCompressionBC ) && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise ) &&
             ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery ) && ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics ) &&
             ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics ) &&
             ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize ) &&
             ( shaderImageGatherExtended == rhs.shaderImageGatherExtended ) && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats ) &&
             ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample ) &&
             ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat ) &&
             ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat ) &&
             ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing ) &&
             ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing ) &&
             ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing ) &&
             ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing ) && ( shaderClipDistance == rhs.shaderClipDistance ) &&
             ( shaderCullDistance == rhs.shaderCullDistance ) && ( shaderFloat64 == rhs.shaderFloat64 ) && ( shaderInt64 == rhs.shaderInt64 ) &&
             ( shaderInt16 == rhs.shaderInt16 ) && ( shaderResourceResidency == rhs.shaderResourceResidency ) &&
             ( shaderResourceMinLod == rhs.shaderResourceMinLod ) && ( sparseBinding == rhs.sparseBinding ) &&
             ( sparseResidencyBuffer == rhs.sparseResidencyBuffer ) && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D ) &&
             ( sparseResidencyImage3D == rhs.sparseResidencyImage3D ) && ( sparseResidency2Samples == rhs.sparseResidency2Samples ) &&
             ( sparseResidency4Samples == rhs.sparseResidency4Samples ) && ( sparseResidency8Samples == rhs.sparseResidency8Samples ) &&
             ( sparseResidency16Samples == rhs.sparseResidency16Samples ) && ( sparseResidencyAliased == rhs.sparseResidencyAliased ) &&
             ( variableMultisampleRate == rhs.variableMultisampleRate ) && ( inheritedQueries == rhs.inheritedQueries );
#  endif
    }

    bool operator!=( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Feature flags, one Bool32 per member, in the same order as the members of
    // VkPhysicalDeviceFeatures (the reinterpret_cast conversions above rely on this).
    // All members are value-initialized to VK_FALSE (0) by default.
    Bool32 robustBufferAccess                      = {};
    Bool32 fullDrawIndexUint32                     = {};
    Bool32 imageCubeArray                          = {};
    Bool32 independentBlend                        = {};
    Bool32 geometryShader                          = {};
    Bool32 tessellationShader                      = {};
    Bool32 sampleRateShading                       = {};
    Bool32 dualSrcBlend                            = {};
    Bool32 logicOp                                 = {};
    Bool32 multiDrawIndirect                       = {};
    Bool32 drawIndirectFirstInstance               = {};
    Bool32 depthClamp                              = {};
    Bool32 depthBiasClamp                          = {};
    Bool32 fillModeNonSolid                        = {};
    Bool32 depthBounds                             = {};
    Bool32 wideLines                               = {};
    Bool32 largePoints                             = {};
    Bool32 alphaToOne                              = {};
    Bool32 multiViewport                           = {};
    Bool32 samplerAnisotropy                       = {};
    Bool32 textureCompressionETC2                  = {};
    Bool32 textureCompressionASTC_LDR              = {};
    Bool32 textureCompressionBC                    = {};
    Bool32 occlusionQueryPrecise                   = {};
    Bool32 pipelineStatisticsQuery                 = {};
    Bool32 vertexPipelineStoresAndAtomics          = {};
    Bool32 fragmentStoresAndAtomics                = {};
    Bool32 shaderTessellationAndGeometryPointSize  = {};
    Bool32 shaderImageGatherExtended               = {};
    Bool32 shaderStorageImageExtendedFormats       = {};
    Bool32 shaderStorageImageMultisample           = {};
    Bool32 shaderStorageImageReadWithoutFormat     = {};
    Bool32 shaderStorageImageWriteWithoutFormat    = {};
    Bool32 shaderUniformBufferArrayDynamicIndexing = {};
    Bool32 shaderSampledImageArrayDynamicIndexing  = {};
    Bool32 shaderStorageBufferArrayDynamicIndexing = {};
    Bool32 shaderStorageImageArrayDynamicIndexing  = {};
    Bool32 shaderClipDistance                      = {};
    Bool32 shaderCullDistance                      = {};
    Bool32 shaderFloat64                           = {};
    Bool32 shaderInt64                             = {};
    Bool32 shaderInt16                             = {};
    Bool32 shaderResourceResidency                 = {};
    Bool32 shaderResourceMinLod                    = {};
    Bool32 sparseBinding                           = {};
    Bool32 sparseResidencyBuffer                   = {};
    Bool32 sparseResidencyImage2D                  = {};
    Bool32 sparseResidencyImage3D                  = {};
    Bool32 sparseResidency2Samples                 = {};
    Bool32 sparseResidency4Samples                 = {};
    Bool32 sparseResidency8Samples                 = {};
    Bool32 sparseResidency16Samples                = {};
    Bool32 sparseResidencyAliased                  = {};
    Bool32 variableMultisampleRate                 = {};
    Bool32 inheritedQueries                        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper; only provided when compiling as C++20
  // or newer (see the guard above).
  template <>
  struct CppType<VkPhysicalDeviceFeatures>
  {
    using Type = PhysicalDeviceFeatures;
  };
#endif

  // wrapper struct for struct VkDeviceCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceCreateInfo.html
  struct DeviceCreateInfo
  {
    using NativeType = VkDeviceCreateInfo;

    // NOTE(review): presumably consumed by the StructureChain machinery defined elsewhere
    // in vulkan.hpp — whether this struct may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The sType value the Vulkan API expects for this struct.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )

    // Silence deprecation warnings: the constructors below touch the layer-related
    // members, whose setters are marked VULKAN_HPP_DEPRECATED( "ignored" ) further down.
#  if defined( _MSC_VER )
#    pragma warning( push )
#    pragma warning( disable : 4996 )  // 'function': was declared deprecated
#  elif defined( __clang__ )
#    pragma clang diagnostic push
#    pragma clang diagnostic ignored "-Wdeprecated-declarations"
#  elif defined( __GNUC__ )
#    pragma GCC diagnostic push
#    pragma GCC diagnostic ignored "-Wdeprecated-declarations"
#  else
// unknown compiler... just ignore the warnings for yourselves ;)
#  endif

    // Member-by-member value constructor; every parameter defaults to empty/zero,
    // and sType is fixed by the structureType static above.
    VULKAN_HPP_CONSTEXPR_17 DeviceCreateInfo( DeviceCreateFlags              flags_                   = {},
                                              uint32_t                       queueCreateInfoCount_    = {},
                                              const DeviceQueueCreateInfo *  pQueueCreateInfos_       = {},
                                              uint32_t                       enabledLayerCount_       = {},
                                              const char * const *           ppEnabledLayerNames_     = {},
                                              uint32_t                       enabledExtensionCount_   = {},
                                              const char * const *           ppEnabledExtensionNames_ = {},
                                              const PhysicalDeviceFeatures * pEnabledFeatures_        = {},
                                              const void *                   pNext_                   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , queueCreateInfoCount{ queueCreateInfoCount_ }
      , pQueueCreateInfos{ pQueueCreateInfos_ }
      , enabledLayerCount{ enabledLayerCount_ }
      , ppEnabledLayerNames{ ppEnabledLayerNames_ }
      , enabledExtensionCount{ enabledExtensionCount_ }
      , ppEnabledExtensionNames{ ppEnabledExtensionNames_ }
      , pEnabledFeatures{ pEnabledFeatures_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_17 DeviceCreateInfo( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; wrapper and C struct share the same layout.
    DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceCreateInfo( *reinterpret_cast<DeviceCreateInfo const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode convenience constructor: derives the count/pointer pairs from
    // ArrayProxy views. The caller must keep the viewed arrays alive while this
    // struct is in use (ArrayProxyNoTemporaries rejects temporaries at compile time).
    DeviceCreateInfo( DeviceCreateFlags                                            flags_,
                      ArrayProxyNoTemporaries<const DeviceQueueCreateInfo> const & queueCreateInfos_,
                      ArrayProxyNoTemporaries<const char * const> const &          pEnabledLayerNames_     = {},
                      ArrayProxyNoTemporaries<const char * const> const &          pEnabledExtensionNames_ = {},
                      const PhysicalDeviceFeatures *                               pEnabledFeatures_       = {},
                      const void *                                                 pNext_                  = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , queueCreateInfoCount( static_cast<uint32_t>( queueCreateInfos_.size() ) )
      , pQueueCreateInfos( queueCreateInfos_.data() )
      , enabledLayerCount( static_cast<uint32_t>( pEnabledLayerNames_.size() ) )
      , ppEnabledLayerNames( pEnabledLayerNames_.data() )
      , enabledExtensionCount( static_cast<uint32_t>( pEnabledExtensionNames_.size() ) )
      , ppEnabledExtensionNames( pEnabledExtensionNames_.data() )
      , pEnabledFeatures( pEnabledFeatures_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DeviceCreateInfo & operator=( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

#  if defined( _MSC_VER )
#    pragma warning( pop )
#  elif defined( __clang__ )
#    pragma clang diagnostic pop
#  elif defined( __GNUC__ )
#    pragma GCC diagnostic pop
#  else
// unknown compiler... just ignore the warnings for yourselves ;)
#  endif

#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the (possibly implicit) copy assignment of the
    // layout-compatible wrapper, mirroring PhysicalDeviceFeatures::operator= above.
    // Copying the whole struct ensures no member is missed: the previous
    // field-by-field version dropped enabledLayerCount and ppEnabledLayerNames,
    // leaving stale values in the assigned-to object.
    DeviceCreateInfo & operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    // The layer-related setters are deprecated and intentionally discard their
    // argument (device layers are ignored by the API).
    VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setFlags( DeviceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) VULKAN_HPP_NOEXCEPT
    {
      queueCreateInfoCount = queueCreateInfoCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPQueueCreateInfos( const DeviceQueueCreateInfo * pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      pQueueCreateInfos = pQueueCreateInfos_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets count and pointer together from an ArrayProxy view; the viewed array
    // must outlive this struct's use.
    DeviceCreateInfo & setQueueCreateInfos( ArrayProxyNoTemporaries<const DeviceQueueCreateInfo> const & queueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      queueCreateInfoCount = static_cast<uint32_t>( queueCreateInfos_.size() );
      pQueueCreateInfos    = queueCreateInfos_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_DEPRECATED( "ignored" ) VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
    {
      detail::ignore( enabledLayerCount_ );
      return *this;
    }

    VULKAN_HPP_DEPRECATED( "ignored" )
    VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
    {
      detail::ignore( ppEnabledLayerNames_ );
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VULKAN_HPP_DEPRECATED( "ignored" )
    DeviceCreateInfo & setPEnabledLayerNames( ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
    {
      detail::ignore( pEnabledLayerNames_ );
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledExtensionCount = enabledExtensionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
    {
      ppEnabledExtensionNames = ppEnabledExtensionNames_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets count and pointer together from an ArrayProxy view; the viewed array
    // must outlive this struct's use.
    DeviceCreateInfo & setPEnabledExtensionNames( ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledExtensionCount   = static_cast<uint32_t>( pEnabledExtensionNames_.size() );
      ppEnabledExtensionNames = pEnabledExtensionNames_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPEnabledFeatures( const PhysicalDeviceFeatures * pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT
    {
      pEnabledFeatures = pEnabledFeatures_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type: DeviceCreateInfo is layout-compatible with
    // VkDeviceCreateInfo, so reinterpreting this object's address is sufficient.
    operator VkDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceCreateInfo *>( this );
    }

    operator VkDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceCreateInfo *>( this );
    }

    operator VkDeviceCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceCreateInfo *>( this );
    }

    operator VkDeviceCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members in declaration order (including the
    // deprecated layer members, which still occupy space in the struct).
    std::tuple<StructureType const &,
               const void * const &,
               DeviceCreateFlags const &,
               uint32_t const &,
               const DeviceQueueCreateInfo * const &,
               uint32_t const &,
               const char * const * const &,
               uint32_t const &,
               const char * const * const &,
               const PhysicalDeviceFeatures * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       flags,
                       queueCreateInfoCount,
                       pQueueCreateInfos,
                       enabledLayerCount,
                       ppEnabledLayerNames,
                       enabledExtensionCount,
                       ppEnabledExtensionNames,
                       pEnabledFeatures );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Three-way comparison. Pointer members (pNext, pQueueCreateInfos,
    // pEnabledFeatures) are compared by address only; extension names are compared
    // by string content. The deprecated enabledLayerCount/ppEnabledLayerNames members
    // are deliberately excluded, matching operator==.
    std::strong_ordering operator<=>( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
        return cmp;
      if ( auto cmp = queueCreateInfoCount <=> rhs.queueCreateInfoCount; cmp != 0 )
        return cmp;
      if ( auto cmp = pQueueCreateInfos <=> rhs.pQueueCreateInfos; cmp != 0 )
        return cmp;
      if ( auto cmp = enabledExtensionCount <=> rhs.enabledExtensionCount; cmp != 0 )
        return cmp;
      for ( size_t i = 0; i < enabledExtensionCount; ++i )
      {
        // Identical pointers (including both null) compare equal without calling strcmp.
        if ( ppEnabledExtensionNames[i] != rhs.ppEnabledExtensionNames[i] )
          if ( auto cmp = strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ); cmp != 0 )
            return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater;
      }
      if ( auto cmp = pEnabledFeatures <=> rhs.pEnabledFeatures; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    // Equality: extension names compare by content (strcmp, with a pointer-equality
    // shortcut in the lambda); other pointers compare by address. The deprecated
    // layer members are excluded, mirroring operator<=>.
    bool operator==( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueCreateInfoCount == rhs.queueCreateInfoCount ) &&
             ( pQueueCreateInfos == rhs.pQueueCreateInfos ) && ( enabledExtensionCount == rhs.enabledExtensionCount ) &&
             std::equal( ppEnabledExtensionNames,
                         ppEnabledExtensionNames + enabledExtensionCount,
                         rhs.ppEnabledExtensionNames,
                         []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } ) &&
             ( pEnabledFeatures == rhs.pEnabledFeatures );
    }

    // Inequality: logical negation of the deep equality defined above.
    bool operator!=( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }

  public:
    // Member order mirrors VkDeviceCreateInfo exactly; the reinterpret_cast
    // conversions above depend on this layout.
    StructureType                 sType                                         = StructureType::eDeviceCreateInfo;
    const void *                  pNext                                         = {};
    DeviceCreateFlags             flags                                         = {};
    uint32_t                      queueCreateInfoCount                          = {};
    const DeviceQueueCreateInfo * pQueueCreateInfos                             = {};
    // The two layer members are ignored (see the "ignored" deprecation tags) but are
    // retained in place so the struct layout still matches the C struct.
    VULKAN_HPP_DEPRECATED( "ignored" ) uint32_t enabledLayerCount               = {};
    VULKAN_HPP_DEPRECATED( "ignored" ) const char * const * ppEnabledLayerNames = {};
    uint32_t                       enabledExtensionCount                        = {};
    const char * const *           ppEnabledExtensionNames                      = {};
    const PhysicalDeviceFeatures * pEnabledFeatures                             = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C type VkDeviceCreateInfo to its C++ wrapper.
  template <>
  struct CppType<VkDeviceCreateInfo>
  {
    using Type = DeviceCreateInfo;
  };
#endif

  // Trait mapping StructureType::eDeviceCreateInfo to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eDeviceCreateInfo>
  {
    using Type = DeviceCreateInfo;
  };

  // wrapper struct for struct VkDeviceMemoryReportCallbackDataEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceMemoryReportCallbackDataEXT.html
  struct DeviceMemoryReportCallbackDataEXT
  {
    using NativeType = VkDeviceMemoryReportCallbackDataEXT;

    // Compile-time traits consumed by vulkan.hpp's structure-chain machinery.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceMemoryReportCallbackDataEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer. No setters are
    // generated for this struct (it is the payload handed to the report callback,
    // see PFN_DeviceMemoryReportCallbackEXT below).
    VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( DeviceMemoryReportFlagsEXT     flags_          = {},
                                                            DeviceMemoryReportEventTypeEXT type_           = DeviceMemoryReportEventTypeEXT::eAllocate,
                                                            uint64_t                       memoryObjectId_ = {},
                                                            DeviceSize                     size_           = {},
                                                            ObjectType                     objectType_     = ObjectType::eUnknown,
                                                            uint64_t                       objectHandle_   = {},
                                                            uint32_t                       heapIndex_      = {},
                                                            void *                         pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , type{ type_ }
      , memoryObjectId{ memoryObjectId_ }
      , size{ size_ }
      , objectType{ objectType_ }
      , objectHandle{ objectHandle_ }
      , heapIndex{ heapIndex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible C struct by reinterpreting its bytes.
    DeviceMemoryReportCallbackDataEXT( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceMemoryReportCallbackDataEXT( *reinterpret_cast<DeviceMemoryReportCallbackDataEXT const *>( &rhs ) )
    {
    }

    DeviceMemoryReportCallbackDataEXT & operator=( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    DeviceMemoryReportCallbackDataEXT & operator=( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceMemoryReportCallbackDataEXT const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the layout-compatible C type.
    operator VkDeviceMemoryReportCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceMemoryReportCallbackDataEXT *>( this );
    }

    operator VkDeviceMemoryReportCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceMemoryReportCallbackDataEXT *>( this );
    }

    operator VkDeviceMemoryReportCallbackDataEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceMemoryReportCallbackDataEXT *>( this );
    }

    operator VkDeviceMemoryReportCallbackDataEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceMemoryReportCallbackDataEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &,
               void * const &,
               DeviceMemoryReportFlagsEXT const &,
               DeviceMemoryReportEventTypeEXT const &,
               uint64_t const &,
               DeviceSize const &,
               ObjectType const &,
               uint64_t const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, type, memoryObjectId, size, objectType, objectHandle, heapIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // All members support direct comparison, so the defaulted member-wise comparison suffices.
    auto operator<=>( DeviceMemoryReportCallbackDataEXT const & ) const = default;
#else
    bool operator==( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( type == rhs.type ) && ( memoryObjectId == rhs.memoryObjectId ) &&
             ( size == rhs.size ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) && ( heapIndex == rhs.heapIndex );
#  endif
    }

    bool operator!=( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly; the reinterpret_cast conversions above rely on it.
    StructureType                  sType          = StructureType::eDeviceMemoryReportCallbackDataEXT;
    void *                         pNext          = {};
    DeviceMemoryReportFlagsEXT     flags          = {};
    DeviceMemoryReportEventTypeEXT type           = DeviceMemoryReportEventTypeEXT::eAllocate;
    uint64_t                       memoryObjectId = {};
    DeviceSize                     size           = {};
    ObjectType                     objectType     = ObjectType::eUnknown;
    uint64_t                       objectHandle   = {};
    uint32_t                       heapIndex      = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C type VkDeviceMemoryReportCallbackDataEXT to its C++ wrapper.
  template <>
  struct CppType<VkDeviceMemoryReportCallbackDataEXT>
  {
    using Type = DeviceMemoryReportCallbackDataEXT;
  };
#endif

  // Trait mapping StructureType::eDeviceMemoryReportCallbackDataEXT to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eDeviceMemoryReportCallbackDataEXT>
  {
    using Type = DeviceMemoryReportCallbackDataEXT;
  };

  // C++-typed analogue of PFN_vkDeviceMemoryReportCallbackEXT: invoked with per-event
  // data plus the pUserData pointer registered via DeviceDeviceMemoryReportCreateInfoEXT.
  typedef void( VKAPI_PTR * PFN_DeviceMemoryReportCallbackEXT )( const DeviceMemoryReportCallbackDataEXT * pCallbackData, void * pUserData );

  // wrapper struct for struct VkDeviceDeviceMemoryReportCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceDeviceMemoryReportCreateInfoEXT.html
  struct DeviceDeviceMemoryReportCreateInfoEXT
  {
    using NativeType = VkDeviceDeviceMemoryReportCreateInfoEXT;

    // Compile-time traits consumed by vulkan.hpp's structure-chain machinery.
    // allowDuplicate is true for this struct.
    static const bool                                  allowDuplicate = true;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( DeviceMemoryReportFlagsEXT        flags_           = {},
                                                                PFN_DeviceMemoryReportCallbackEXT pfnUserCallback_ = {},
                                                                void *                            pUserData_       = {},
                                                                const void *                      pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , pfnUserCallback{ pfnUserCallback_ }
      , pUserData{ pUserData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible C struct by reinterpreting its bytes.
    DeviceDeviceMemoryReportCreateInfoEXT( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceDeviceMemoryReportCreateInfoEXT( *reinterpret_cast<DeviceDeviceMemoryReportCreateInfoEXT const *>( &rhs ) )
    {
    }

#  if defined( __clang__ ) || defined( __GNUC__ )
#    pragma GCC diagnostic push
#    if defined( __clang__ )
#      pragma clang diagnostic ignored "-Wunknown-warning-option"
#    endif
#    pragma GCC diagnostic ignored "-Wcast-function-type"
#  endif
    // Legacy constructor taking the raw C callback type. The function-pointer
    // reinterpret_cast below triggers -Wcast-function-type, hence the surrounding
    // diagnostic suppression.
    VULKAN_HPP_DEPRECATED( "This constructor is deprecated. Use the one taking function pointer types from the vk-namespace instead." )

    DeviceDeviceMemoryReportCreateInfoEXT( DeviceMemoryReportFlagsEXT          flags_,
                                           PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_,
                                           void *                              pUserData_ = {},
                                           const void *                        pNext_     = nullptr ) VULKAN_HPP_NOEXCEPT
      : DeviceDeviceMemoryReportCreateInfoEXT( flags_, reinterpret_cast<PFN_DeviceMemoryReportCallbackEXT>( pfnUserCallback_ ), pUserData_, pNext_ )
    {
    }
#  if defined( __clang__ ) || defined( __GNUC__ )
#    pragma GCC diagnostic pop
#  endif

    DeviceDeviceMemoryReportCreateInfoEXT & operator=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    DeviceDeviceMemoryReportCreateInfoEXT & operator=( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceDeviceMemoryReportCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setFlags( DeviceMemoryReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setPfnUserCallback( PFN_DeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
    {
      pfnUserCallback = pfnUserCallback_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
    {
      pUserData = pUserData_;
      return *this;
    }

#  if defined( __clang__ ) || defined( __GNUC__ )
#    pragma GCC diagnostic push
#    if defined( __clang__ )
#      pragma clang diagnostic ignored "-Wunknown-warning-option"
#    endif
#    pragma GCC diagnostic ignored "-Wcast-function-type"
#  endif
    // Legacy setter taking the raw C callback type; the function-pointer cast again
    // requires suppressing -Wcast-function-type.
    VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." )

    DeviceDeviceMemoryReportCreateInfoEXT & setPfnUserCallback( PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
    {
      return setPfnUserCallback( reinterpret_cast<PFN_DeviceMemoryReportCallbackEXT>( pfnUserCallback_ ) );
    }
#  if defined( __clang__ ) || defined( __GNUC__ )
#    pragma GCC diagnostic pop
#  endif
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the layout-compatible C type.
    operator VkDeviceDeviceMemoryReportCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceDeviceMemoryReportCreateInfoEXT *>( this );
    }

    operator VkDeviceDeviceMemoryReportCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceDeviceMemoryReportCreateInfoEXT *>( this );
    }

    operator VkDeviceDeviceMemoryReportCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceDeviceMemoryReportCreateInfoEXT *>( this );
    }

    operator VkDeviceDeviceMemoryReportCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceDeviceMemoryReportCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, DeviceMemoryReportFlagsEXT const &, PFN_DeviceMemoryReportCallbackEXT const &, void * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, pfnUserCallback, pUserData );
    }
#endif

    // Only ==/!= are generated for this struct; there is no operator<=> here.
    // pfnUserCallback (a function pointer) and pUserData compare by address.
    bool operator==( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pfnUserCallback == rhs.pfnUserCallback ) &&
             ( pUserData == rhs.pUserData );
#endif
    }

    bool operator!=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Member order mirrors the C struct exactly; the reinterpret_cast conversions above rely on it.
    StructureType                     sType           = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT;
    const void *                      pNext           = {};
    DeviceMemoryReportFlagsEXT        flags           = {};
    PFN_DeviceMemoryReportCallbackEXT pfnUserCallback = {};
    void *                            pUserData       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C type VkDeviceDeviceMemoryReportCreateInfoEXT to its C++ wrapper.
  template <>
  struct CppType<VkDeviceDeviceMemoryReportCreateInfoEXT>
  {
    using Type = DeviceDeviceMemoryReportCreateInfoEXT;
  };
#endif

  // Trait mapping StructureType::eDeviceDeviceMemoryReportCreateInfoEXT to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eDeviceDeviceMemoryReportCreateInfoEXT>
  {
    using Type = DeviceDeviceMemoryReportCreateInfoEXT;
  };

  // wrapper struct for struct VkDeviceDiagnosticsConfigCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceDiagnosticsConfigCreateInfoNV.html
  struct DeviceDiagnosticsConfigCreateInfoNV
  {
    using NativeType = VkDeviceDiagnosticsConfigCreateInfoNV;

    // Compile-time traits consumed by vulkan.hpp's structure-chain machinery.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceDiagnosticsConfigCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( DeviceDiagnosticsConfigFlagsNV flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible C struct by reinterpreting its bytes.
    DeviceDiagnosticsConfigCreateInfoNV( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceDiagnosticsConfigCreateInfoNV( *reinterpret_cast<DeviceDiagnosticsConfigCreateInfoNV const *>( &rhs ) )
    {
    }

    DeviceDiagnosticsConfigCreateInfoNV & operator=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    DeviceDiagnosticsConfigCreateInfoNV & operator=( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceDiagnosticsConfigCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & setFlags( DeviceDiagnosticsConfigFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the layout-compatible C type.
    operator VkDeviceDiagnosticsConfigCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceDiagnosticsConfigCreateInfoNV *>( this );
    }

    operator VkDeviceDiagnosticsConfigCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceDiagnosticsConfigCreateInfoNV *>( this );
    }

    operator VkDeviceDiagnosticsConfigCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceDiagnosticsConfigCreateInfoNV *>( this );
    }

    operator VkDeviceDiagnosticsConfigCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceDiagnosticsConfigCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, DeviceDiagnosticsConfigFlagsNV const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // All members support direct comparison, so the defaulted member-wise comparison suffices.
    auto operator<=>( DeviceDiagnosticsConfigCreateInfoNV const & ) const = default;
#else
    bool operator==( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
#  endif
    }

    bool operator!=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly; the reinterpret_cast conversions above rely on it.
    StructureType                  sType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV;
    const void *                   pNext = {};
    DeviceDiagnosticsConfigFlagsNV flags = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C type VkDeviceDiagnosticsConfigCreateInfoNV to its C++ wrapper.
  template <>
  struct CppType<VkDeviceDiagnosticsConfigCreateInfoNV>
  {
    using Type = DeviceDiagnosticsConfigCreateInfoNV;
  };
#endif

  // Trait mapping StructureType::eDeviceDiagnosticsConfigCreateInfoNV to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eDeviceDiagnosticsConfigCreateInfoNV>
  {
    using Type = DeviceDiagnosticsConfigCreateInfoNV;
  };

  // wrapper struct for struct VkDeviceEventInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceEventInfoEXT.html
  struct DeviceEventInfoEXT
  {
    using NativeType = VkDeviceEventInfoEXT;

    // Compile-time traits consumed by vulkan.hpp's structure-chain machinery.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceEventInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventTypeEXT deviceEvent_ = DeviceEventTypeEXT::eDisplayHotplug,
                                             const void *       pNext_       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , deviceEvent{ deviceEvent_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible C struct by reinterpreting its bytes.
    DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceEventInfoEXT( *reinterpret_cast<DeviceEventInfoEXT const *>( &rhs ) ) {}

    DeviceEventInfoEXT & operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    DeviceEventInfoEXT & operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceEventInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & setDeviceEvent( DeviceEventTypeEXT deviceEvent_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceEvent = deviceEvent_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the layout-compatible C type.
    operator VkDeviceEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceEventInfoEXT *>( this );
    }

    operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceEventInfoEXT *>( this );
    }

    operator VkDeviceEventInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceEventInfoEXT *>( this );
    }

    operator VkDeviceEventInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceEventInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, DeviceEventTypeEXT const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, deviceEvent );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // All members support direct comparison, so the defaulted member-wise comparison suffices.
    auto operator<=>( DeviceEventInfoEXT const & ) const = default;
#else
    bool operator==( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceEvent == rhs.deviceEvent );
#  endif
    }

    bool operator!=( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly; the reinterpret_cast conversions above rely on it.
    StructureType      sType       = StructureType::eDeviceEventInfoEXT;
    const void *       pNext       = {};
    DeviceEventTypeEXT deviceEvent = DeviceEventTypeEXT::eDisplayHotplug;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C type VkDeviceEventInfoEXT to its C++ wrapper.
  template <>
  struct CppType<VkDeviceEventInfoEXT>
  {
    using Type = DeviceEventInfoEXT;
  };
#endif

  // Trait mapping StructureType::eDeviceEventInfoEXT to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eDeviceEventInfoEXT>
  {
    using Type = DeviceEventInfoEXT;
  };

  // wrapper struct for struct VkDeviceFaultAddressInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceFaultAddressInfoEXT.html
  struct DeviceFaultAddressInfoEXT
  {
    using NativeType = VkDeviceFaultAddressInfoEXT;

    // Note: plain data struct — unlike most Vulkan info structs it carries no
    // sType/pNext, so there are no structure-chain traits here.
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all members default to zero / eNone.
    VULKAN_HPP_CONSTEXPR DeviceFaultAddressInfoEXT( DeviceFaultAddressTypeEXT addressType_      = DeviceFaultAddressTypeEXT::eNone,
                                                    DeviceAddress             reportedAddress_  = {},
                                                    DeviceSize                addressPrecision_ = {} ) VULKAN_HPP_NOEXCEPT
      : addressType{ addressType_ }
      , reportedAddress{ reportedAddress_ }
      , addressPrecision{ addressPrecision_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceFaultAddressInfoEXT( DeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible C struct by reinterpreting its bytes.
    DeviceFaultAddressInfoEXT( VkDeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceFaultAddressInfoEXT( *reinterpret_cast<DeviceFaultAddressInfoEXT const *>( &rhs ) )
    {
    }

    DeviceFaultAddressInfoEXT & operator=( DeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    DeviceFaultAddressInfoEXT & operator=( VkDeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceFaultAddressInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setAddressType( DeviceFaultAddressTypeEXT addressType_ ) VULKAN_HPP_NOEXCEPT
    {
      addressType = addressType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setReportedAddress( DeviceAddress reportedAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      reportedAddress = reportedAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setAddressPrecision( DeviceSize addressPrecision_ ) VULKAN_HPP_NOEXCEPT
    {
      addressPrecision = addressPrecision_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the layout-compatible C type.
    operator VkDeviceFaultAddressInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceFaultAddressInfoEXT *>( this );
    }

    operator VkDeviceFaultAddressInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceFaultAddressInfoEXT *>( this );
    }

    operator VkDeviceFaultAddressInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceFaultAddressInfoEXT *>( this );
    }

    operator VkDeviceFaultAddressInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceFaultAddressInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<DeviceFaultAddressTypeEXT const &, DeviceAddress const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( addressType, reportedAddress, addressPrecision );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // All members support direct comparison, so the defaulted member-wise comparison suffices.
    auto operator<=>( DeviceFaultAddressInfoEXT const & ) const = default;
#else
    bool operator==( DeviceFaultAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( addressType == rhs.addressType ) && ( reportedAddress == rhs.reportedAddress ) && ( addressPrecision == rhs.addressPrecision );
#  endif
    }

    bool operator!=( DeviceFaultAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly; the reinterpret_cast conversions above rely on it.
    DeviceFaultAddressTypeEXT addressType      = DeviceFaultAddressTypeEXT::eNone;
    DeviceAddress             reportedAddress  = {};
    DeviceSize                addressPrecision = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C type VkDeviceFaultAddressInfoEXT to its C++ wrapper.
  // (No StructureType specialization: this struct has no sType member.)
  template <>
  struct CppType<VkDeviceFaultAddressInfoEXT>
  {
    using Type = DeviceFaultAddressInfoEXT;
  };
#endif

  // wrapper struct for struct VkDeviceFaultCountsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceFaultCountsEXT.html
  struct DeviceFaultCountsEXT
  {
    using NativeType = VkDeviceFaultCountsEXT;

    // Compile-time traits consumed by vulkan.hpp's structure-chain machinery.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceFaultCountsEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer. Note pNext is a
    // mutable void * here (not const void *).
    VULKAN_HPP_CONSTEXPR DeviceFaultCountsEXT( uint32_t   addressInfoCount_ = {},
                                               uint32_t   vendorInfoCount_  = {},
                                               DeviceSize vendorBinarySize_ = {},
                                               void *     pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , addressInfoCount{ addressInfoCount_ }
      , vendorInfoCount{ vendorInfoCount_ }
      , vendorBinarySize{ vendorBinarySize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceFaultCountsEXT( DeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible C struct by reinterpreting its bytes.
    DeviceFaultCountsEXT( VkDeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceFaultCountsEXT( *reinterpret_cast<DeviceFaultCountsEXT const *>( &rhs ) )
    {
    }

    DeviceFaultCountsEXT & operator=( DeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    DeviceFaultCountsEXT & operator=( VkDeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceFaultCountsEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setAddressInfoCount( uint32_t addressInfoCount_ ) VULKAN_HPP_NOEXCEPT
    {
      addressInfoCount = addressInfoCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setVendorInfoCount( uint32_t vendorInfoCount_ ) VULKAN_HPP_NOEXCEPT
    {
      vendorInfoCount = vendorInfoCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setVendorBinarySize( DeviceSize vendorBinarySize_ ) VULKAN_HPP_NOEXCEPT
    {
      vendorBinarySize = vendorBinarySize_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the layout-compatible C type.
    operator VkDeviceFaultCountsEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceFaultCountsEXT *>( this );
    }

    operator VkDeviceFaultCountsEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceFaultCountsEXT *>( this );
    }

    operator VkDeviceFaultCountsEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceFaultCountsEXT *>( this );
    }

    operator VkDeviceFaultCountsEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceFaultCountsEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, void * const &, uint32_t const &, uint32_t const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, addressInfoCount, vendorInfoCount, vendorBinarySize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // All members support direct comparison, so the defaulted member-wise comparison suffices.
    auto operator<=>( DeviceFaultCountsEXT const & ) const = default;
#else
    bool operator==( DeviceFaultCountsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( addressInfoCount == rhs.addressInfoCount ) && ( vendorInfoCount == rhs.vendorInfoCount ) &&
             ( vendorBinarySize == rhs.vendorBinarySize );
#  endif
    }

    bool operator!=( DeviceFaultCountsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly; the reinterpret_cast conversions above rely on it.
    StructureType sType            = StructureType::eDeviceFaultCountsEXT;
    void *        pNext            = {};
    uint32_t      addressInfoCount = {};
    uint32_t      vendorInfoCount  = {};
    DeviceSize    vendorBinarySize = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkDeviceFaultCountsEXT type to its C++ wrapper for
  // template-based lookup (only available when compiling as C++20 or later).
  template <>
  struct CppType<VkDeviceFaultCountsEXT>
  {
    using Type = DeviceFaultCountsEXT;
  };
#endif

  // Maps StructureType::eDeviceFaultCountsEXT back to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eDeviceFaultCountsEXT>
  {
    using Type = DeviceFaultCountsEXT;
  };

  // wrapper struct for struct VkDeviceFaultVendorInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceFaultVendorInfoEXT.html
  struct DeviceFaultVendorInfoEXT
  {
    using NativeType = VkDeviceFaultVendorInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT( std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_     = {},
                                                      uint64_t                                          vendorFaultCode_ = {},
                                                      uint64_t                                          vendorFaultData_ = {} ) VULKAN_HPP_NOEXCEPT
      : description{ description_ }
      , vendorFaultCode{ vendorFaultCode_ }
      , vendorFaultData{ vendorFaultData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT( DeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility with
    // VkDeviceFaultVendorInfoEXT.
    DeviceFaultVendorInfoEXT( VkDeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceFaultVendorInfoEXT( *reinterpret_cast<DeviceFaultVendorInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor taking a std::string description. The assert requires
    // size < VK_MAX_DESCRIPTION_SIZE; termination then comes from the
    // value-initialized `description` member, since strncpy copies only the string's
    // characters. NOTE(review): if the assert is violated in a release build (size
    // >= VK_MAX_DESCRIPTION_SIZE) the array is left without a NUL terminator, and
    // the strcmp-based comparisons below would read past it — confirm callers
    // respect the size limit.
    DeviceFaultVendorInfoEXT( std::string const & description_, uint64_t vendorFaultCode_ = {}, uint64_t vendorFaultData_ = {} )
      : vendorFaultCode( vendorFaultCode_ ), vendorFaultData( vendorFaultData_ )
    {
      VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE );
#    if defined( _WIN32 )
      strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() );
#    else
      strncpy( description, description_.data(), std::min<size_t>( VK_MAX_DESCRIPTION_SIZE, description_.size() ) );
#    endif
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DeviceFaultVendorInfoEXT & operator=( DeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    DeviceFaultVendorInfoEXT & operator=( VkDeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceFaultVendorInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters.
    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setDescription( std::array<char, VK_MAX_DESCRIPTION_SIZE> description_ ) VULKAN_HPP_NOEXCEPT
    {
      description = description_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // std::string overload with the same bounds handling (and the same
    // size == VK_MAX_DESCRIPTION_SIZE caveat) as the std::string constructor.
    DeviceFaultVendorInfoEXT & setDescription( std::string const & description_ ) VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE );
#    if defined( _WIN32 )
      strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() );
#    else
      strncpy( description, description_.data(), std::min<size_t>( VK_MAX_DESCRIPTION_SIZE, description_.size() ) );
#    endif
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setVendorFaultCode( uint64_t vendorFaultCode_ ) VULKAN_HPP_NOEXCEPT
    {
      vendorFaultCode = vendorFaultCode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setVendorFaultData( uint64_t vendorFaultData_ ) VULKAN_HPP_NOEXCEPT
    {
      vendorFaultData = vendorFaultData_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct.
    operator VkDeviceFaultVendorInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceFaultVendorInfoEXT *>( this );
    }

    operator VkDeviceFaultVendorInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceFaultVendorInfoEXT *>( this );
    }

    operator VkDeviceFaultVendorInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceFaultVendorInfoEXT *>( this );
    }

    operator VkDeviceFaultVendorInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceFaultVendorInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, uint64_t const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( description, vendorFaultCode, vendorFaultData );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written ordering: description is compared as a NUL-terminated C string
    // (strcmp), not as the full VK_MAX_DESCRIPTION_SIZE array, so a defaulted
    // operator<=> cannot be used here.
    std::strong_ordering operator<=>( DeviceFaultVendorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = vendorFaultCode <=> rhs.vendorFaultCode; cmp != 0 )
        return cmp;
      if ( auto cmp = vendorFaultData <=> rhs.vendorFaultData; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    // Equality uses strcmp as well, so it matches the ordering above (C-string
    // semantics for description).
    bool operator==( DeviceFaultVendorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( strcmp( description, rhs.description ) == 0 ) && ( vendorFaultCode == rhs.vendorFaultCode ) && ( vendorFaultData == rhs.vendorFaultData );
    }

    bool operator!=( DeviceFaultVendorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Member order must mirror VkDeviceFaultVendorInfoEXT exactly.
    ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description     = {};
    uint64_t                                      vendorFaultCode = {};
    uint64_t                                      vendorFaultData = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkDeviceFaultVendorInfoEXT type to its C++ wrapper
  // (C++20 and later only).
  template <>
  struct CppType<VkDeviceFaultVendorInfoEXT>
  {
    using Type = DeviceFaultVendorInfoEXT;
  };
#endif

  // wrapper struct for struct VkDeviceFaultInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceFaultInfoEXT.html
  struct DeviceFaultInfoEXT
  {
    using NativeType = VkDeviceFaultInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceFaultInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT( std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_       = {},
                                                DeviceFaultAddressInfoEXT *                       pAddressInfos_     = {},
                                                DeviceFaultVendorInfoEXT *                        pVendorInfos_      = {},
                                                void *                                            pVendorBinaryData_ = {},
                                                void *                                            pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , description{ description_ }
      , pAddressInfos{ pAddressInfos_ }
      , pVendorInfos{ pVendorInfos_ }
      , pVendorBinaryData{ pVendorBinaryData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT( DeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceFaultInfoEXT( VkDeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceFaultInfoEXT( *reinterpret_cast<DeviceFaultInfoEXT const *>( &rhs ) ) {}

    DeviceFaultInfoEXT & operator=( DeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    DeviceFaultInfoEXT & operator=( VkDeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceFaultInfoEXT const *>( &rhs );
      return *this;
    }

    // NOTE: no setter block is generated for this struct — presumably because it is
    // an output structure filled in by the implementation; confirm against the
    // VK_EXT_device_fault spec.

    operator VkDeviceFaultInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceFaultInfoEXT *>( this );
    }

    operator VkDeviceFaultInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceFaultInfoEXT *>( this );
    }

    operator VkDeviceFaultInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceFaultInfoEXT *>( this );
    }

    operator VkDeviceFaultInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceFaultInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &,
               void * const &,
               ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &,
               DeviceFaultAddressInfoEXT * const &,
               DeviceFaultVendorInfoEXT * const &,
               void * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, description, pAddressInfos, pVendorInfos, pVendorBinaryData );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written ordering because description uses strcmp (C-string semantics);
    // all pointer members are compared by address, not by pointee contents.
    std::strong_ordering operator<=>( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = pAddressInfos <=> rhs.pAddressInfos; cmp != 0 )
        return cmp;
      if ( auto cmp = pVendorInfos <=> rhs.pVendorInfos; cmp != 0 )
        return cmp;
      if ( auto cmp = pVendorBinaryData <=> rhs.pVendorBinaryData; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    bool operator==( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( strcmp( description, rhs.description ) == 0 ) && ( pAddressInfos == rhs.pAddressInfos ) &&
             ( pVendorInfos == rhs.pVendorInfos ) && ( pVendorBinaryData == rhs.pVendorBinaryData );
    }

    bool operator!=( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Member order must mirror VkDeviceFaultInfoEXT exactly.
    StructureType                                 sType             = StructureType::eDeviceFaultInfoEXT;
    void *                                        pNext             = {};
    ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description       = {};
    DeviceFaultAddressInfoEXT *                   pAddressInfos     = {};
    DeviceFaultVendorInfoEXT *                    pVendorInfos      = {};
    void *                                        pVendorBinaryData = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkDeviceFaultInfoEXT type to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkDeviceFaultInfoEXT>
  {
    using Type = DeviceFaultInfoEXT;
  };
#endif

  // Maps StructureType::eDeviceFaultInfoEXT back to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eDeviceFaultInfoEXT>
  {
    using Type = DeviceFaultInfoEXT;
  };

  // wrapper struct for struct VkDeviceFaultVendorBinaryHeaderVersionOneEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceFaultVendorBinaryHeaderVersionOneEXT.html
  struct DeviceFaultVendorBinaryHeaderVersionOneEXT
  {
    using NativeType = VkDeviceFaultVendorBinaryHeaderVersionOneEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Unlike most Vulkan structs in this file, this one has no sType/pNext members
    // (it is not a chainable structure). headerVersion defaults to eOne both here
    // and in the member initializer below.
    VULKAN_HPP_CONSTEXPR_14
      DeviceFaultVendorBinaryHeaderVersionOneEXT( uint32_t                                  headerSize_        = {},
                                                  DeviceFaultVendorBinaryHeaderVersionEXT   headerVersion_     = DeviceFaultVendorBinaryHeaderVersionEXT::eOne,
                                                  uint32_t                                  vendorID_          = {},
                                                  uint32_t                                  deviceID_          = {},
                                                  uint32_t                                  driverVersion_     = {},
                                                  std::array<uint8_t, VK_UUID_SIZE> const & pipelineCacheUUID_ = {},
                                                  uint32_t                                  applicationNameOffset_ = {},
                                                  uint32_t                                  applicationVersion_    = {},
                                                  uint32_t                                  engineNameOffset_      = {},
                                                  uint32_t                                  engineVersion_         = {},
                                                  uint32_t                                  apiVersion_            = {} ) VULKAN_HPP_NOEXCEPT
      : headerSize{ headerSize_ }
      , headerVersion{ headerVersion_ }
      , vendorID{ vendorID_ }
      , deviceID{ deviceID_ }
      , driverVersion{ driverVersion_ }
      , pipelineCacheUUID{ pipelineCacheUUID_ }
      , applicationNameOffset{ applicationNameOffset_ }
      , applicationVersion{ applicationVersion_ }
      , engineNameOffset{ engineNameOffset_ }
      , engineVersion{ engineVersion_ }
      , apiVersion{ apiVersion_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility.
    DeviceFaultVendorBinaryHeaderVersionOneEXT( VkDeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceFaultVendorBinaryHeaderVersionOneEXT( *reinterpret_cast<DeviceFaultVendorBinaryHeaderVersionOneEXT const *>( &rhs ) )
    {
    }

    DeviceFaultVendorBinaryHeaderVersionOneEXT & operator=( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    DeviceFaultVendorBinaryHeaderVersionOneEXT & operator=( VkDeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceFaultVendorBinaryHeaderVersionOneEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per member.
    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setHeaderSize( uint32_t headerSize_ ) VULKAN_HPP_NOEXCEPT
    {
      headerSize = headerSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT &
      setHeaderVersion( DeviceFaultVendorBinaryHeaderVersionEXT headerVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      headerVersion = headerVersion_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setVendorID( uint32_t vendorID_ ) VULKAN_HPP_NOEXCEPT
    {
      vendorID = vendorID_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setDeviceID( uint32_t deviceID_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceID = deviceID_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setDriverVersion( uint32_t driverVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      driverVersion = driverVersion_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT &
      setPipelineCacheUUID( std::array<uint8_t, VK_UUID_SIZE> pipelineCacheUUID_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineCacheUUID = pipelineCacheUUID_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setApplicationNameOffset( uint32_t applicationNameOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      applicationNameOffset = applicationNameOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setApplicationVersion( uint32_t applicationVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      applicationVersion = applicationVersion_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setEngineNameOffset( uint32_t engineNameOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      engineNameOffset = engineNameOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setEngineVersion( uint32_t engineVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      engineVersion = engineVersion_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setApiVersion( uint32_t apiVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      apiVersion = apiVersion_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct.
    operator VkDeviceFaultVendorBinaryHeaderVersionOneEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceFaultVendorBinaryHeaderVersionOneEXT *>( this );
    }

    operator VkDeviceFaultVendorBinaryHeaderVersionOneEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceFaultVendorBinaryHeaderVersionOneEXT *>( this );
    }

    operator VkDeviceFaultVendorBinaryHeaderVersionOneEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceFaultVendorBinaryHeaderVersionOneEXT *>( this );
    }

    operator VkDeviceFaultVendorBinaryHeaderVersionOneEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceFaultVendorBinaryHeaderVersionOneEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<uint32_t const &,
               DeviceFaultVendorBinaryHeaderVersionEXT const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( headerSize,
                       headerVersion,
                       vendorID,
                       deviceID,
                       driverVersion,
                       pipelineCacheUUID,
                       applicationNameOffset,
                       applicationVersion,
                       engineNameOffset,
                       engineVersion,
                       apiVersion );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceFaultVendorBinaryHeaderVersionOneEXT const & ) const = default;
#else
    bool operator==( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( headerSize == rhs.headerSize ) && ( headerVersion == rhs.headerVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) &&
             ( driverVersion == rhs.driverVersion ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) &&
             ( applicationNameOffset == rhs.applicationNameOffset ) && ( applicationVersion == rhs.applicationVersion ) &&
             ( engineNameOffset == rhs.engineNameOffset ) && ( engineVersion == rhs.engineVersion ) && ( apiVersion == rhs.apiVersion );
#  endif
    }

    bool operator!=( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order must mirror VkDeviceFaultVendorBinaryHeaderVersionOneEXT exactly.
    uint32_t                                headerSize            = {};
    DeviceFaultVendorBinaryHeaderVersionEXT headerVersion         = DeviceFaultVendorBinaryHeaderVersionEXT::eOne;
    uint32_t                                vendorID              = {};
    uint32_t                                deviceID              = {};
    uint32_t                                driverVersion         = {};
    ArrayWrapper1D<uint8_t, VK_UUID_SIZE>   pipelineCacheUUID     = {};
    uint32_t                                applicationNameOffset = {};
    uint32_t                                applicationVersion    = {};
    uint32_t                                engineNameOffset      = {};
    uint32_t                                engineVersion         = {};
    uint32_t                                apiVersion            = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkDeviceFaultVendorBinaryHeaderVersionOneEXT type to its C++
  // wrapper (C++20 and later only).
  template <>
  struct CppType<VkDeviceFaultVendorBinaryHeaderVersionOneEXT>
  {
    using Type = DeviceFaultVendorBinaryHeaderVersionOneEXT;
  };
#endif

  // wrapper struct for struct VkDeviceGroupBindSparseInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupBindSparseInfo.html
  struct DeviceGroupBindSparseInfo
  {
    using NativeType = VkDeviceGroupBindSparseInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceGroupBindSparseInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Both device indices default to 0 (value-initialized).
    VULKAN_HPP_CONSTEXPR
      DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = {}, uint32_t memoryDeviceIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , resourceDeviceIndex{ resourceDeviceIndex_ }
      , memoryDeviceIndex{ memoryDeviceIndex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility.
    DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceGroupBindSparseInfo( *reinterpret_cast<DeviceGroupBindSparseInfo const *>( &rhs ) )
    {
    }

    DeviceGroupBindSparseInfo & operator=( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    DeviceGroupBindSparseInfo & operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceGroupBindSparseInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters.
    VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      resourceDeviceIndex = resourceDeviceIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryDeviceIndex = memoryDeviceIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct.
    operator VkDeviceGroupBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceGroupBindSparseInfo *>( this );
    }

    operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceGroupBindSparseInfo *>( this );
    }

    operator VkDeviceGroupBindSparseInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceGroupBindSparseInfo *>( this );
    }

    operator VkDeviceGroupBindSparseInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceGroupBindSparseInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, resourceDeviceIndex, memoryDeviceIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceGroupBindSparseInfo const & ) const = default;
#else
    bool operator==( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( resourceDeviceIndex == rhs.resourceDeviceIndex ) &&
             ( memoryDeviceIndex == rhs.memoryDeviceIndex );
#  endif
    }

    bool operator!=( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order must mirror VkDeviceGroupBindSparseInfo exactly.
    StructureType sType               = StructureType::eDeviceGroupBindSparseInfo;
    const void *  pNext               = {};
    uint32_t      resourceDeviceIndex = {};
    uint32_t      memoryDeviceIndex   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkDeviceGroupBindSparseInfo type to its C++ wrapper
  // (C++20 and later only).
  template <>
  struct CppType<VkDeviceGroupBindSparseInfo>
  {
    using Type = DeviceGroupBindSparseInfo;
  };
#endif

  // Maps StructureType::eDeviceGroupBindSparseInfo back to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eDeviceGroupBindSparseInfo>
  {
    using Type = DeviceGroupBindSparseInfo;
  };

  // Backward-compatible alias preserving the original KHR extension name.
  using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo;

  // wrapper struct for struct VkDeviceGroupCommandBufferBeginInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupCommandBufferBeginInfo.html
  struct DeviceGroupCommandBufferBeginInfo
  {
    using NativeType = VkDeviceGroupCommandBufferBeginInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceGroupCommandBufferBeginInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // deviceMask defaults to 0 (value-initialized).
    VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , deviceMask{ deviceMask_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility.
    DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceGroupCommandBufferBeginInfo( *reinterpret_cast<DeviceGroupCommandBufferBeginInfo const *>( &rhs ) )
    {
    }

    DeviceGroupCommandBufferBeginInfo & operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    DeviceGroupCommandBufferBeginInfo & operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceGroupCommandBufferBeginInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters.
    VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceMask = deviceMask_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct.
    operator VkDeviceGroupCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceGroupCommandBufferBeginInfo *>( this );
    }

    operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo *>( this );
    }

    operator VkDeviceGroupCommandBufferBeginInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceGroupCommandBufferBeginInfo *>( this );
    }

    operator VkDeviceGroupCommandBufferBeginInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, deviceMask );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceGroupCommandBufferBeginInfo const & ) const = default;
#else
    bool operator==( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask );
#  endif
    }

    bool operator!=( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order must mirror VkDeviceGroupCommandBufferBeginInfo exactly.
    StructureType sType      = StructureType::eDeviceGroupCommandBufferBeginInfo;
    const void *  pNext      = {};
    uint32_t      deviceMask = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkDeviceGroupCommandBufferBeginInfo type to its C++ wrapper
  // (C++20 and later only).
  template <>
  struct CppType<VkDeviceGroupCommandBufferBeginInfo>
  {
    using Type = DeviceGroupCommandBufferBeginInfo;
  };
#endif

  // Maps StructureType::eDeviceGroupCommandBufferBeginInfo back to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eDeviceGroupCommandBufferBeginInfo>
  {
    using Type = DeviceGroupCommandBufferBeginInfo;
  };

  // Backward-compatible alias preserving the original KHR extension name.
  using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo;

  // wrapper struct for struct VkDeviceGroupDeviceCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupDeviceCreateInfo.html
  struct DeviceGroupDeviceCreateInfo
  {
    using NativeType = VkDeviceGroupDeviceCreateInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceGroupDeviceCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Count/pointer pair describing the physical devices in the group; the caller
    // must keep the pointed-to array alive while this struct is in use.
    VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( uint32_t               physicalDeviceCount_ = {},
                                                      const PhysicalDevice * pPhysicalDevices_    = {},
                                                      const void *           pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , physicalDeviceCount{ physicalDeviceCount_ }
      , pPhysicalDevices{ pPhysicalDevices_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility.
    DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceGroupDeviceCreateInfo( *reinterpret_cast<DeviceGroupDeviceCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode convenience: derives count and pointer from the array proxy.
    // The proxy forbids temporaries, but the referenced storage must still outlive
    // any use of this struct.
    DeviceGroupDeviceCreateInfo( ArrayProxyNoTemporaries<const PhysicalDevice> const & physicalDevices_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), physicalDeviceCount( static_cast<uint32_t>( physicalDevices_.size() ) ), pPhysicalDevices( physicalDevices_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DeviceGroupDeviceCreateInfo & operator=( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    DeviceGroupDeviceCreateInfo & operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceGroupDeviceCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) VULKAN_HPP_NOEXCEPT
    {
      physicalDeviceCount = physicalDeviceCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPPhysicalDevices( const PhysicalDevice * pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT
    {
      pPhysicalDevices = pPhysicalDevices_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DeviceGroupDeviceCreateInfo & setPhysicalDevices( ArrayProxyNoTemporaries<const PhysicalDevice> const & physicalDevices_ ) VULKAN_HPP_NOEXCEPT
    {
      physicalDeviceCount = static_cast<uint32_t>( physicalDevices_.size() );
      pPhysicalDevices    = physicalDevices_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    operator VkDeviceGroupDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceGroupDeviceCreateInfo *>( this );
    }

    operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceGroupDeviceCreateInfo *>( this );
    }

    operator VkDeviceGroupDeviceCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceGroupDeviceCreateInfo *>( this );
    }

    operator VkDeviceGroupDeviceCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceGroupDeviceCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const PhysicalDevice * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, physicalDeviceCount, pPhysicalDevices );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceGroupDeviceCreateInfo const & ) const = default;
#else
    bool operator==( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) &&
             ( pPhysicalDevices == rhs.pPhysicalDevices );
#  endif
    }

    bool operator!=( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType          sType               = StructureType::eDeviceGroupDeviceCreateInfo;
    const void *           pNext               = {};
    uint32_t               physicalDeviceCount = {};
    const PhysicalDevice * pPhysicalDevices    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only available on C++20 builds).
  template <>
  struct CppType<VkDeviceGroupDeviceCreateInfo>
  {
    using Type = DeviceGroupDeviceCreateInfo;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDeviceGroupDeviceCreateInfo>
  {
    using Type = DeviceGroupDeviceCreateInfo;
  };

  // KHR-suffixed alias kept for source compatibility with the extension-era name.
  using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo;

  // wrapper struct for struct VkDeviceGroupPresentCapabilitiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupPresentCapabilitiesKHR.html
  struct DeviceGroupPresentCapabilitiesKHR
  {
    using NativeType = VkDeviceGroupPresentCapabilitiesKHR;

    // false: duplicate entries of this struct in a structure chain are not allowed.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceGroupPresentCapabilitiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; CONSTEXPR_14 (not CONSTEXPR) — presumably because the
    // member-array initialization is not valid C++11 constexpr; confirm against the generator.
    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( std::array<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> const & presentMask_ = {},
                                                               DeviceGroupPresentModeFlagsKHR                         modes_       = {},
                                                               void *                                                 pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , presentMask{ presentMask_ }
      , modes{ modes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible.
    DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceGroupPresentCapabilitiesKHR( *reinterpret_cast<DeviceGroupPresentCapabilitiesKHR const *>( &rhs ) )
    {
    }

    DeviceGroupPresentCapabilitiesKHR & operator=( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper.
    DeviceGroupPresentCapabilitiesKHR & operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceGroupPresentCapabilitiesKHR const *>( &rhs );
      return *this;
    }

    // NOTE(review): no setters are generated here and pNext is non-const — this looks like an
    // output-only structure returned by a query; confirm against the Vulkan spec.

    // Zero-copy conversions to the native C struct (reference and pointer, const and non-const).
    operator VkDeviceGroupPresentCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceGroupPresentCapabilitiesKHR *>( this );
    }

    operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( this );
    }

    operator VkDeviceGroupPresentCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceGroupPresentCapabilitiesKHR *>( this );
    }

    operator VkDeviceGroupPresentCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, used for reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, ArrayWrapper1D<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> const &, DeviceGroupPresentModeFlagsKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, presentMask, modes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceGroupPresentCapabilitiesKHR const & ) const = default;
#else
    // Member-wise equality; presentMask compares element-wise via ArrayWrapper1D.
    bool operator==( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentMask == rhs.presentMask ) && ( modes == rhs.modes );
#  endif
    }

    bool operator!=( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkDeviceGroupPresentCapabilitiesKHR exactly; do not reorder.
    StructureType                                      sType       = StructureType::eDeviceGroupPresentCapabilitiesKHR;
    void *                                             pNext       = {};
    ArrayWrapper1D<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> presentMask = {};
    DeviceGroupPresentModeFlagsKHR                     modes       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only available on C++20 builds).
  template <>
  struct CppType<VkDeviceGroupPresentCapabilitiesKHR>
  {
    using Type = DeviceGroupPresentCapabilitiesKHR;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDeviceGroupPresentCapabilitiesKHR>
  {
    using Type = DeviceGroupPresentCapabilitiesKHR;
  };

  // wrapper struct for struct VkDeviceGroupPresentInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupPresentInfoKHR.html
  struct DeviceGroupPresentInfoKHR
  {
    using NativeType = VkDeviceGroupPresentInfoKHR;

    // false: duplicate entries of this struct in a structure chain are not allowed.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceGroupPresentInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; mode defaults to eLocal, other members to zero/null.
    VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( uint32_t                          swapchainCount_ = {},
                                                    const uint32_t *                  pDeviceMasks_   = {},
                                                    DeviceGroupPresentModeFlagBitsKHR mode_           = DeviceGroupPresentModeFlagBitsKHR::eLocal,
                                                    const void *                      pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , swapchainCount{ swapchainCount_ }
      , pDeviceMasks{ pDeviceMasks_ }
      , mode{ mode_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible.
    DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceGroupPresentInfoKHR( *reinterpret_cast<DeviceGroupPresentInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives swapchainCount from the array proxy's size.
    DeviceGroupPresentInfoKHR( ArrayProxyNoTemporaries<const uint32_t> const & deviceMasks_,
                               DeviceGroupPresentModeFlagBitsKHR               mode_  = DeviceGroupPresentModeFlagBitsKHR::eLocal,
                               const void *                                    pNext_ = nullptr )
      : pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( deviceMasks_.size() ) ), pDeviceMasks( deviceMasks_.data() ), mode( mode_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DeviceGroupPresentInfoKHR & operator=( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper.
    DeviceGroupPresentInfoKHR & operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceGroupPresentInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable member setters; each returns *this to allow fluent construction.
    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = swapchainCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t * pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
    {
      pDeviceMasks = pDeviceMasks_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both swapchainCount and pDeviceMasks from a single array proxy.
    DeviceGroupPresentInfoKHR & setDeviceMasks( ArrayProxyNoTemporaries<const uint32_t> const & deviceMasks_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = static_cast<uint32_t>( deviceMasks_.size() );
      pDeviceMasks   = deviceMasks_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setMode( DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT
    {
      mode = mode_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-copy conversions to the native C struct (reference and pointer, const and non-const).
    operator VkDeviceGroupPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceGroupPresentInfoKHR *>( this );
    }

    operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceGroupPresentInfoKHR *>( this );
    }

    operator VkDeviceGroupPresentInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceGroupPresentInfoKHR *>( this );
    }

    operator VkDeviceGroupPresentInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceGroupPresentInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, used for reflection-based comparison below.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &, DeviceGroupPresentModeFlagBitsKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, swapchainCount, pDeviceMasks, mode );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceGroupPresentInfoKHR const & ) const = default;
#else
    // Member-wise equality; note pointer members compare by address, not pointee contents.
    bool operator==( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pDeviceMasks == rhs.pDeviceMasks ) &&
             ( mode == rhs.mode );
#  endif
    }

    bool operator!=( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkDeviceGroupPresentInfoKHR exactly; do not reorder.
    StructureType                     sType          = StructureType::eDeviceGroupPresentInfoKHR;
    const void *                      pNext          = {};
    uint32_t                          swapchainCount = {};
    const uint32_t *                  pDeviceMasks   = {};
    DeviceGroupPresentModeFlagBitsKHR mode           = DeviceGroupPresentModeFlagBitsKHR::eLocal;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only available on C++20 builds).
  template <>
  struct CppType<VkDeviceGroupPresentInfoKHR>
  {
    using Type = DeviceGroupPresentInfoKHR;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDeviceGroupPresentInfoKHR>
  {
    using Type = DeviceGroupPresentInfoKHR;
  };

  // wrapper struct for struct VkDeviceGroupRenderPassBeginInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupRenderPassBeginInfo.html
  struct DeviceGroupRenderPassBeginInfo
  {
    using NativeType = VkDeviceGroupRenderPassBeginInfo;

    // false: duplicate entries of this struct in a structure chain are not allowed.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceGroupRenderPassBeginInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; all members default to zero/null.
    VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( uint32_t       deviceMask_            = {},
                                                         uint32_t       deviceRenderAreaCount_ = {},
                                                         const Rect2D * pDeviceRenderAreas_    = {},
                                                         const void *   pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , deviceMask{ deviceMask_ }
      , deviceRenderAreaCount{ deviceRenderAreaCount_ }
      , pDeviceRenderAreas{ pDeviceRenderAreas_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible.
    DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceGroupRenderPassBeginInfo( *reinterpret_cast<DeviceGroupRenderPassBeginInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives deviceRenderAreaCount from the array proxy's size.
    DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_, ArrayProxyNoTemporaries<const Rect2D> const & deviceRenderAreas_, const void * pNext_ = nullptr )
      : pNext( pNext_ )
      , deviceMask( deviceMask_ )
      , deviceRenderAreaCount( static_cast<uint32_t>( deviceRenderAreas_.size() ) )
      , pDeviceRenderAreas( deviceRenderAreas_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DeviceGroupRenderPassBeginInfo & operator=( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper.
    DeviceGroupRenderPassBeginInfo & operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceGroupRenderPassBeginInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable member setters; each returns *this to allow fluent construction.
    VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceMask = deviceMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceRenderAreaCount = deviceRenderAreaCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setPDeviceRenderAreas( const Rect2D * pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
    {
      pDeviceRenderAreas = pDeviceRenderAreas_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both deviceRenderAreaCount and pDeviceRenderAreas from a single array proxy.
    DeviceGroupRenderPassBeginInfo & setDeviceRenderAreas( ArrayProxyNoTemporaries<const Rect2D> const & deviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceRenderAreaCount = static_cast<uint32_t>( deviceRenderAreas_.size() );
      pDeviceRenderAreas    = deviceRenderAreas_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Zero-copy conversions to the native C struct (reference and pointer, const and non-const).
    operator VkDeviceGroupRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceGroupRenderPassBeginInfo *>( this );
    }

    operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceGroupRenderPassBeginInfo *>( this );
    }

    operator VkDeviceGroupRenderPassBeginInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceGroupRenderPassBeginInfo *>( this );
    }

    operator VkDeviceGroupRenderPassBeginInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceGroupRenderPassBeginInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, used for reflection-based comparison below.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const Rect2D * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, deviceMask, deviceRenderAreaCount, pDeviceRenderAreas );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceGroupRenderPassBeginInfo const & ) const = default;
#else
    // Member-wise equality; note pointer members compare by address, not pointee contents.
    bool operator==( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask ) && ( deviceRenderAreaCount == rhs.deviceRenderAreaCount ) &&
             ( pDeviceRenderAreas == rhs.pDeviceRenderAreas );
#  endif
    }

    bool operator!=( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkDeviceGroupRenderPassBeginInfo exactly; do not reorder.
    StructureType  sType                 = StructureType::eDeviceGroupRenderPassBeginInfo;
    const void *   pNext                 = {};
    uint32_t       deviceMask            = {};
    uint32_t       deviceRenderAreaCount = {};
    const Rect2D * pDeviceRenderAreas    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only available on C++20 builds).
  template <>
  struct CppType<VkDeviceGroupRenderPassBeginInfo>
  {
    using Type = DeviceGroupRenderPassBeginInfo;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDeviceGroupRenderPassBeginInfo>
  {
    using Type = DeviceGroupRenderPassBeginInfo;
  };

  // KHR-suffixed alias kept for source compatibility with the extension-era name.
  using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo;

  // wrapper struct for struct VkDeviceGroupSubmitInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupSubmitInfo.html
  struct DeviceGroupSubmitInfo
  {
    using NativeType = VkDeviceGroupSubmitInfo;

    // false: duplicate entries of this struct in a structure chain are not allowed.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceGroupSubmitInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; three count/pointer pairs (wait semaphores, command buffers,
    // signal semaphores), all defaulting to zero/null.
    VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( uint32_t         waitSemaphoreCount_            = {},
                                                const uint32_t * pWaitSemaphoreDeviceIndices_   = {},
                                                uint32_t         commandBufferCount_            = {},
                                                const uint32_t * pCommandBufferDeviceMasks_     = {},
                                                uint32_t         signalSemaphoreCount_          = {},
                                                const uint32_t * pSignalSemaphoreDeviceIndices_ = {},
                                                const void *     pNext_                         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , waitSemaphoreCount{ waitSemaphoreCount_ }
      , pWaitSemaphoreDeviceIndices{ pWaitSemaphoreDeviceIndices_ }
      , commandBufferCount{ commandBufferCount_ }
      , pCommandBufferDeviceMasks{ pCommandBufferDeviceMasks_ }
      , signalSemaphoreCount{ signalSemaphoreCount_ }
      , pSignalSemaphoreDeviceIndices{ pSignalSemaphoreDeviceIndices_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible.
    DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceGroupSubmitInfo( *reinterpret_cast<DeviceGroupSubmitInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: each count member is derived from its array proxy's size.
    DeviceGroupSubmitInfo( ArrayProxyNoTemporaries<const uint32_t> const & waitSemaphoreDeviceIndices_,
                           ArrayProxyNoTemporaries<const uint32_t> const & commandBufferDeviceMasks_     = {},
                           ArrayProxyNoTemporaries<const uint32_t> const & signalSemaphoreDeviceIndices_ = {},
                           const void *                                    pNext_                        = nullptr )
      : pNext( pNext_ )
      , waitSemaphoreCount( static_cast<uint32_t>( waitSemaphoreDeviceIndices_.size() ) )
      , pWaitSemaphoreDeviceIndices( waitSemaphoreDeviceIndices_.data() )
      , commandBufferCount( static_cast<uint32_t>( commandBufferDeviceMasks_.size() ) )
      , pCommandBufferDeviceMasks( commandBufferDeviceMasks_.data() )
      , signalSemaphoreCount( static_cast<uint32_t>( signalSemaphoreDeviceIndices_.size() ) )
      , pSignalSemaphoreDeviceIndices( signalSemaphoreDeviceIndices_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DeviceGroupSubmitInfo & operator=( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper.
    DeviceGroupSubmitInfo & operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceGroupSubmitInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable member setters; each returns *this to allow fluent construction.
    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreCount = waitSemaphoreCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPWaitSemaphoreDeviceIndices( const uint32_t * pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both waitSemaphoreCount and pWaitSemaphoreDeviceIndices from a single array proxy.
    DeviceGroupSubmitInfo & setWaitSemaphoreDeviceIndices( ArrayProxyNoTemporaries<const uint32_t> const & waitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreCount          = static_cast<uint32_t>( waitSemaphoreDeviceIndices_.size() );
      pWaitSemaphoreDeviceIndices = waitSemaphoreDeviceIndices_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
    {
      commandBufferCount = commandBufferCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPCommandBufferDeviceMasks( const uint32_t * pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
    {
      pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both commandBufferCount and pCommandBufferDeviceMasks from a single array proxy.
    DeviceGroupSubmitInfo & setCommandBufferDeviceMasks( ArrayProxyNoTemporaries<const uint32_t> const & commandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
    {
      commandBufferCount        = static_cast<uint32_t>( commandBufferDeviceMasks_.size() );
      pCommandBufferDeviceMasks = commandBufferDeviceMasks_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
    {
      signalSemaphoreCount = signalSemaphoreCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPSignalSemaphoreDeviceIndices( const uint32_t * pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both signalSemaphoreCount and pSignalSemaphoreDeviceIndices from a single array proxy.
    DeviceGroupSubmitInfo & setSignalSemaphoreDeviceIndices( ArrayProxyNoTemporaries<const uint32_t> const & signalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      signalSemaphoreCount          = static_cast<uint32_t>( signalSemaphoreDeviceIndices_.size() );
      pSignalSemaphoreDeviceIndices = signalSemaphoreDeviceIndices_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Zero-copy conversions to the native C struct (reference and pointer, const and non-const).
    operator VkDeviceGroupSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceGroupSubmitInfo *>( this );
    }

    operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceGroupSubmitInfo *>( this );
    }

    operator VkDeviceGroupSubmitInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceGroupSubmitInfo *>( this );
    }

    operator VkDeviceGroupSubmitInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceGroupSubmitInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, used for reflection-based comparison below.
    std::tuple<StructureType const &,
               const void * const &,
               uint32_t const &,
               const uint32_t * const &,
               uint32_t const &,
               const uint32_t * const &,
               uint32_t const &,
               const uint32_t * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       waitSemaphoreCount,
                       pWaitSemaphoreDeviceIndices,
                       commandBufferCount,
                       pCommandBufferDeviceMasks,
                       signalSemaphoreCount,
                       pSignalSemaphoreDeviceIndices );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceGroupSubmitInfo const & ) const = default;
#else
    // Member-wise equality; note pointer members compare by address, not pointee contents.
    bool operator==( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) &&
             ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices ) && ( commandBufferCount == rhs.commandBufferCount ) &&
             ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) &&
             ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices );
#  endif
    }

    bool operator!=( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkDeviceGroupSubmitInfo exactly; do not reorder.
    StructureType    sType                         = StructureType::eDeviceGroupSubmitInfo;
    const void *     pNext                         = {};
    uint32_t         waitSemaphoreCount            = {};
    const uint32_t * pWaitSemaphoreDeviceIndices   = {};
    uint32_t         commandBufferCount            = {};
    const uint32_t * pCommandBufferDeviceMasks     = {};
    uint32_t         signalSemaphoreCount          = {};
    const uint32_t * pSignalSemaphoreDeviceIndices = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only available on C++20 builds).
  template <>
  struct CppType<VkDeviceGroupSubmitInfo>
  {
    using Type = DeviceGroupSubmitInfo;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDeviceGroupSubmitInfo>
  {
    using Type = DeviceGroupSubmitInfo;
  };

  // KHR-suffixed alias kept for source compatibility with the extension-era name.
  using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo;

  // wrapper struct for struct VkDeviceGroupSwapchainCreateInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupSwapchainCreateInfoKHR.html
  struct DeviceGroupSwapchainCreateInfoKHR
  {
    using NativeType = VkDeviceGroupSwapchainCreateInfoKHR;

    // May appear at most once in a pNext chain; sType is fixed to the value below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceGroupSwapchainCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all parameters are defaulted, so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( DeviceGroupPresentModeFlagsKHR modes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , modes{ modes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; assumes the wrapper is layout-compatible with VkDeviceGroupSwapchainCreateInfoKHR.
    DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceGroupSwapchainCreateInfoKHR( *reinterpret_cast<DeviceGroupSwapchainCreateInfoKHR const *>( &rhs ) )
    {
    }

    DeviceGroupSwapchainCreateInfoKHR & operator=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (available even when wrapper constructors are disabled).
    DeviceGroupSwapchainCreateInfoKHR & operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceGroupSwapchainCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR & setModes( DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT
    {
      modes = modes_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct; depend on matching member layout.
    operator VkDeviceGroupSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceGroupSwapchainCreateInfoKHR *>( this );
    }

    operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR *>( this );
    }

    operator VkDeviceGroupSwapchainCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceGroupSwapchainCreateInfoKHR *>( this );
    }

    operator VkDeviceGroupSwapchainCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for the memberwise comparison below.
    std::tuple<StructureType const &, const void * const &, DeviceGroupPresentModeFlagsKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, modes );
    }
#endif

    // Memberwise (in)equality; note that pNext is compared as a raw pointer, not deeply.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceGroupSwapchainCreateInfoKHR const & ) const = default;
#else
    bool operator==( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( modes == rhs.modes );
#  endif
    }

    bool operator!=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkDeviceGroupSwapchainCreateInfoKHR; the casts above depend on this declaration order.
    StructureType                  sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
    const void *                   pNext = {};
    DeviceGroupPresentModeFlagsKHR modes = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct back to its C++ wrapper type (only provided for C++20 and newer).
  template <>
  struct CppType<VkDeviceGroupSwapchainCreateInfoKHR>
  {
    using Type = DeviceGroupSwapchainCreateInfoKHR;
  };
#endif

  // Maps StructureType::eDeviceGroupSwapchainCreateInfoKHR to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDeviceGroupSwapchainCreateInfoKHR>
  {
    using Type = DeviceGroupSwapchainCreateInfoKHR;
  };

  // wrapper struct for struct VkImageCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCreateInfo.html
  struct ImageCreateInfo
  {
    using NativeType = VkImageCreateInfo;

    // May appear at most once in a pNext chain; sType is fixed to the value below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all parameters are defaulted, so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR ImageCreateInfo( ImageCreateFlags    flags_                 = {},
                                          ImageType           imageType_             = ImageType::e1D,
                                          Format              format_                = Format::eUndefined,
                                          Extent3D            extent_                = {},
                                          uint32_t            mipLevels_             = {},
                                          uint32_t            arrayLayers_           = {},
                                          SampleCountFlagBits samples_               = SampleCountFlagBits::e1,
                                          ImageTiling         tiling_                = ImageTiling::eOptimal,
                                          ImageUsageFlags     usage_                 = {},
                                          SharingMode         sharingMode_           = SharingMode::eExclusive,
                                          uint32_t            queueFamilyIndexCount_ = {},
                                          const uint32_t *    pQueueFamilyIndices_   = {},
                                          ImageLayout         initialLayout_         = ImageLayout::eUndefined,
                                          const void *        pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , imageType{ imageType_ }
      , format{ format_ }
      , extent{ extent_ }
      , mipLevels{ mipLevels_ }
      , arrayLayers{ arrayLayers_ }
      , samples{ samples_ }
      , tiling{ tiling_ }
      , usage{ usage_ }
      , sharingMode{ sharingMode_ }
      , queueFamilyIndexCount{ queueFamilyIndexCount_ }
      , pQueueFamilyIndices{ pQueueFamilyIndices_ }
      , initialLayout{ initialLayout_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageCreateInfo( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; assumes the wrapper is layout-compatible with VkImageCreateInfo.
    ImageCreateInfo( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCreateInfo( *reinterpret_cast<ImageCreateInfo const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: queueFamilyIndexCount and pQueueFamilyIndices are derived from the
    // array proxy. Only the pointer is stored — the caller must keep the indices alive while in use.
    ImageCreateInfo( ImageCreateFlags                                flags_,
                     ImageType                                       imageType_,
                     Format                                          format_,
                     Extent3D                                        extent_,
                     uint32_t                                        mipLevels_,
                     uint32_t                                        arrayLayers_,
                     SampleCountFlagBits                             samples_,
                     ImageTiling                                     tiling_,
                     ImageUsageFlags                                 usage_,
                     SharingMode                                     sharingMode_,
                     ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_,
                     ImageLayout                                     initialLayout_ = ImageLayout::eUndefined,
                     const void *                                    pNext_         = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , imageType( imageType_ )
      , format( format_ )
      , extent( extent_ )
      , mipLevels( mipLevels_ )
      , arrayLayers( arrayLayers_ )
      , samples( samples_ )
      , tiling( tiling_ )
      , usage( usage_ )
      , sharingMode( sharingMode_ )
      , queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) )
      , pQueueFamilyIndices( queueFamilyIndices_.data() )
      , initialLayout( initialLayout_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    ImageCreateInfo & operator=( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (available even when wrapper constructors are disabled).
    ImageCreateInfo & operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setFlags( ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setImageType( ImageType imageType_ ) VULKAN_HPP_NOEXCEPT
    {
      imageType = imageType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setFormat( Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setExtent( Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
    {
      extent = extent_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setMipLevels( uint32_t mipLevels_ ) VULKAN_HPP_NOEXCEPT
    {
      mipLevels = mipLevels_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setArrayLayers( uint32_t arrayLayers_ ) VULKAN_HPP_NOEXCEPT
    {
      arrayLayers = arrayLayers_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setSamples( SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
    {
      samples = samples_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setTiling( ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
    {
      tiling = tiling_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setUsage( ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setSharingMode( SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
    {
      sharingMode = sharingMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndexCount = queueFamilyIndexCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pQueueFamilyIndices = pQueueFamilyIndices_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience setter: fills both queueFamilyIndexCount and pQueueFamilyIndices from one array proxy.
    ImageCreateInfo & setQueueFamilyIndices( ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
      pQueueFamilyIndices   = queueFamilyIndices_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setInitialLayout( ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      initialLayout = initialLayout_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct; depend on matching member layout.
    operator VkImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageCreateInfo *>( this );
    }

    operator VkImageCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageCreateInfo *>( this );
    }

    operator VkImageCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageCreateInfo *>( this );
    }

    operator VkImageCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for the memberwise comparison below.
    std::tuple<StructureType const &,
               const void * const &,
               ImageCreateFlags const &,
               ImageType const &,
               Format const &,
               Extent3D const &,
               uint32_t const &,
               uint32_t const &,
               SampleCountFlagBits const &,
               ImageTiling const &,
               ImageUsageFlags const &,
               SharingMode const &,
               uint32_t const &,
               const uint32_t * const &,
               ImageLayout const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       flags,
                       imageType,
                       format,
                       extent,
                       mipLevels,
                       arrayLayers,
                       samples,
                       tiling,
                       usage,
                       sharingMode,
                       queueFamilyIndexCount,
                       pQueueFamilyIndices,
                       initialLayout );
    }
#endif

    // Memberwise (in)equality; pNext and pQueueFamilyIndices are compared as raw pointers, not deeply.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageCreateInfo const & ) const = default;
#else
    bool operator==( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( imageType == rhs.imageType ) && ( format == rhs.format ) &&
             ( extent == rhs.extent ) && ( mipLevels == rhs.mipLevels ) && ( arrayLayers == rhs.arrayLayers ) && ( samples == rhs.samples ) &&
             ( tiling == rhs.tiling ) && ( usage == rhs.usage ) && ( sharingMode == rhs.sharingMode ) &&
             ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) &&
             ( initialLayout == rhs.initialLayout );
#  endif
    }

    bool operator!=( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkImageCreateInfo; the casts above depend on this declaration order.
    StructureType       sType                 = StructureType::eImageCreateInfo;
    const void *        pNext                 = {};
    ImageCreateFlags    flags                 = {};
    ImageType           imageType             = ImageType::e1D;
    Format              format                = Format::eUndefined;
    Extent3D            extent                = {};
    uint32_t            mipLevels             = {};
    uint32_t            arrayLayers           = {};
    SampleCountFlagBits samples               = SampleCountFlagBits::e1;
    ImageTiling         tiling                = ImageTiling::eOptimal;
    ImageUsageFlags     usage                 = {};
    SharingMode         sharingMode           = SharingMode::eExclusive;
    uint32_t            queueFamilyIndexCount = {};
    const uint32_t *    pQueueFamilyIndices   = {};
    ImageLayout         initialLayout         = ImageLayout::eUndefined;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct back to its C++ wrapper type (only provided for C++20 and newer).
  template <>
  struct CppType<VkImageCreateInfo>
  {
    using Type = ImageCreateInfo;
  };
#endif

  // Maps StructureType::eImageCreateInfo to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eImageCreateInfo>
  {
    using Type = ImageCreateInfo;
  };

  // wrapper struct for struct VkDeviceImageMemoryRequirements, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceImageMemoryRequirements.html
  struct DeviceImageMemoryRequirements
  {
    using NativeType = VkDeviceImageMemoryRequirements;

    // May appear at most once in a pNext chain; sType is fixed to the value below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceImageMemoryRequirements;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; only the pCreateInfo pointer is stored, so the pointed-to
    // ImageCreateInfo must outlive this struct's use.
    VULKAN_HPP_CONSTEXPR DeviceImageMemoryRequirements( const ImageCreateInfo * pCreateInfo_ = {},
                                                        ImageAspectFlagBits     planeAspect_ = ImageAspectFlagBits::eColor,
                                                        const void *            pNext_       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pCreateInfo{ pCreateInfo_ }
      , planeAspect{ planeAspect_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceImageMemoryRequirements( DeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; assumes the wrapper is layout-compatible with VkDeviceImageMemoryRequirements.
    DeviceImageMemoryRequirements( VkDeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceImageMemoryRequirements( *reinterpret_cast<DeviceImageMemoryRequirements const *>( &rhs ) )
    {
    }

    DeviceImageMemoryRequirements & operator=( DeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (available even when wrapper constructors are disabled).
    DeviceImageMemoryRequirements & operator=( VkDeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceImageMemoryRequirements const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPCreateInfo( const ImageCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pCreateInfo = pCreateInfo_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPlaneAspect( ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
    {
      planeAspect = planeAspect_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct; depend on matching member layout.
    operator VkDeviceImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceImageMemoryRequirements *>( this );
    }

    operator VkDeviceImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceImageMemoryRequirements *>( this );
    }

    operator VkDeviceImageMemoryRequirements const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceImageMemoryRequirements *>( this );
    }

    operator VkDeviceImageMemoryRequirements *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceImageMemoryRequirements *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for the memberwise comparison below.
    std::tuple<StructureType const &, const void * const &, const ImageCreateInfo * const &, ImageAspectFlagBits const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pCreateInfo, planeAspect );
    }
#endif

    // Memberwise (in)equality; pNext and pCreateInfo are compared as raw pointers, not deeply.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceImageMemoryRequirements const & ) const = default;
#else
    bool operator==( DeviceImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo ) && ( planeAspect == rhs.planeAspect );
#  endif
    }

    bool operator!=( DeviceImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkDeviceImageMemoryRequirements; the casts above depend on this declaration order.
    StructureType           sType       = StructureType::eDeviceImageMemoryRequirements;
    const void *            pNext       = {};
    const ImageCreateInfo * pCreateInfo = {};
    ImageAspectFlagBits     planeAspect = ImageAspectFlagBits::eColor;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct back to its C++ wrapper type (only provided for C++20 and newer).
  template <>
  struct CppType<VkDeviceImageMemoryRequirements>
  {
    using Type = DeviceImageMemoryRequirements;
  };
#endif

  // Maps StructureType::eDeviceImageMemoryRequirements to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDeviceImageMemoryRequirements>
  {
    using Type = DeviceImageMemoryRequirements;
  };

  // Backwards-compatible alias for the KHR-suffixed name.
  using DeviceImageMemoryRequirementsKHR = DeviceImageMemoryRequirements;

  // wrapper struct for struct VkImageSubresource2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageSubresource2.html
  struct ImageSubresource2
  {
    using NativeType = VkImageSubresource2;

    // May appear at most once in a pNext chain; sType is fixed to the value below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageSubresource2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor. NOTE: pNext is a non-const void * here, unlike the create-info
    // structs above — presumably because extension structs chained to this one may be
    // written by the implementation; confirm against the spec.
    VULKAN_HPP_CONSTEXPR ImageSubresource2( ImageSubresource imageSubresource_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , imageSubresource{ imageSubresource_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageSubresource2( ImageSubresource2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; assumes the wrapper is layout-compatible with VkImageSubresource2.
    ImageSubresource2( VkImageSubresource2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageSubresource2( *reinterpret_cast<ImageSubresource2 const *>( &rhs ) ) {}

    ImageSubresource2 & operator=( ImageSubresource2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (available even when wrapper constructors are disabled).
    ImageSubresource2 & operator=( VkImageSubresource2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageSubresource2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ImageSubresource2 & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageSubresource2 & setImageSubresource( ImageSubresource const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      imageSubresource = imageSubresource_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct; depend on matching member layout.
    operator VkImageSubresource2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageSubresource2 *>( this );
    }

    operator VkImageSubresource2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageSubresource2 *>( this );
    }

    operator VkImageSubresource2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageSubresource2 *>( this );
    }

    operator VkImageSubresource2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageSubresource2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for the memberwise comparison below.
    std::tuple<StructureType const &, void * const &, ImageSubresource const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, imageSubresource );
    }
#endif

    // Memberwise (in)equality; pNext is compared as a raw pointer, not deeply.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageSubresource2 const & ) const = default;
#else
    bool operator==( ImageSubresource2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageSubresource == rhs.imageSubresource );
#  endif
    }

    bool operator!=( ImageSubresource2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkImageSubresource2; the casts above depend on this declaration order.
    StructureType    sType            = StructureType::eImageSubresource2;
    void *           pNext            = {};
    ImageSubresource imageSubresource = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct back to its C++ wrapper type (only provided for C++20 and newer).
  template <>
  struct CppType<VkImageSubresource2>
  {
    using Type = ImageSubresource2;
  };
#endif

  // Maps StructureType::eImageSubresource2 to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eImageSubresource2>
  {
    using Type = ImageSubresource2;
  };

  // Backwards-compatible aliases for the EXT- and KHR-suffixed names.
  using ImageSubresource2EXT = ImageSubresource2;
  using ImageSubresource2KHR = ImageSubresource2;

  // wrapper struct for struct VkDeviceImageSubresourceInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceImageSubresourceInfo.html
  struct DeviceImageSubresourceInfo
  {
    using NativeType = VkDeviceImageSubresourceInfo;

    // May appear at most once in a pNext chain; sType is fixed to the value below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceImageSubresourceInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; only the pointers are stored, so the pointed-to create-info and
    // subresource structs must outlive this struct's use.
    VULKAN_HPP_CONSTEXPR DeviceImageSubresourceInfo( const ImageCreateInfo *   pCreateInfo_  = {},
                                                     const ImageSubresource2 * pSubresource_ = {},
                                                     const void *              pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pCreateInfo{ pCreateInfo_ }
      , pSubresource{ pSubresource_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceImageSubresourceInfo( DeviceImageSubresourceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; assumes the wrapper is layout-compatible with VkDeviceImageSubresourceInfo.
    DeviceImageSubresourceInfo( VkDeviceImageSubresourceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceImageSubresourceInfo( *reinterpret_cast<DeviceImageSubresourceInfo const *>( &rhs ) )
    {
    }

    DeviceImageSubresourceInfo & operator=( DeviceImageSubresourceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (available even when wrapper constructors are disabled).
    DeviceImageSubresourceInfo & operator=( VkDeviceImageSubresourceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceImageSubresourceInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfo & setPCreateInfo( const ImageCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pCreateInfo = pCreateInfo_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfo & setPSubresource( const ImageSubresource2 * pSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      pSubresource = pSubresource_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct; depend on matching member layout.
    operator VkDeviceImageSubresourceInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceImageSubresourceInfo *>( this );
    }

    operator VkDeviceImageSubresourceInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceImageSubresourceInfo *>( this );
    }

    operator VkDeviceImageSubresourceInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceImageSubresourceInfo *>( this );
    }

    operator VkDeviceImageSubresourceInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceImageSubresourceInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for the memberwise comparison below.
    std::tuple<StructureType const &, const void * const &, const ImageCreateInfo * const &, const ImageSubresource2 * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pCreateInfo, pSubresource );
    }
#endif

    // Memberwise (in)equality; all pointer members are compared as raw pointers, not deeply.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceImageSubresourceInfo const & ) const = default;
#else
    bool operator==( DeviceImageSubresourceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo ) && ( pSubresource == rhs.pSubresource );
#  endif
    }

    bool operator!=( DeviceImageSubresourceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkDeviceImageSubresourceInfo; the casts above depend on this declaration order.
    StructureType             sType        = StructureType::eDeviceImageSubresourceInfo;
    const void *              pNext        = {};
    const ImageCreateInfo *   pCreateInfo  = {};
    const ImageSubresource2 * pSubresource = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct back to its C++ wrapper type (only provided for C++20 and newer).
  template <>
  struct CppType<VkDeviceImageSubresourceInfo>
  {
    using Type = DeviceImageSubresourceInfo;
  };
#endif

  // Maps StructureType::eDeviceImageSubresourceInfo to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDeviceImageSubresourceInfo>
  {
    using Type = DeviceImageSubresourceInfo;
  };

  // Backwards-compatible alias for the KHR-suffixed name.
  using DeviceImageSubresourceInfoKHR = DeviceImageSubresourceInfo;

  // wrapper struct for struct VkDeviceMemoryOpaqueCaptureAddressInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceMemoryOpaqueCaptureAddressInfo.html
  struct DeviceMemoryOpaqueCaptureAddressInfo
  {
    using NativeType = VkDeviceMemoryOpaqueCaptureAddressInfo;

    // May appear at most once in a pNext chain; sType is fixed to the value below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all parameters are defaulted, so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( DeviceMemory memory_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , memory{ memory_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; assumes the wrapper is layout-compatible with VkDeviceMemoryOpaqueCaptureAddressInfo.
    DeviceMemoryOpaqueCaptureAddressInfo( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceMemoryOpaqueCaptureAddressInfo( *reinterpret_cast<DeviceMemoryOpaqueCaptureAddressInfo const *>( &rhs ) )
    {
    }

    DeviceMemoryOpaqueCaptureAddressInfo & operator=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (available even when wrapper constructors are disabled).
    DeviceMemoryOpaqueCaptureAddressInfo & operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceMemoryOpaqueCaptureAddressInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & setMemory( DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct; depend on matching member layout.
    operator VkDeviceMemoryOpaqueCaptureAddressInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( this );
    }

    operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceMemoryOpaqueCaptureAddressInfo *>( this );
    }

    operator VkDeviceMemoryOpaqueCaptureAddressInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( this );
    }

    operator VkDeviceMemoryOpaqueCaptureAddressInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceMemoryOpaqueCaptureAddressInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for the memberwise comparison below.
    std::tuple<StructureType const &, const void * const &, DeviceMemory const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memory );
    }
#endif

    // Memberwise (in)equality; pNext is compared as a raw pointer, not deeply.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceMemoryOpaqueCaptureAddressInfo const & ) const = default;
#else
    bool operator==( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory );
#  endif
    }

    bool operator!=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkDeviceMemoryOpaqueCaptureAddressInfo; the casts above depend on this declaration order.
    StructureType sType  = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo;
    const void *  pNext  = {};
    DeviceMemory  memory = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct back to its C++ wrapper type (only provided for C++20 and newer).
  template <>
  struct CppType<VkDeviceMemoryOpaqueCaptureAddressInfo>
  {
    using Type = DeviceMemoryOpaqueCaptureAddressInfo;
  };
#endif

  // Maps StructureType::eDeviceMemoryOpaqueCaptureAddressInfo to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDeviceMemoryOpaqueCaptureAddressInfo>
  {
    using Type = DeviceMemoryOpaqueCaptureAddressInfo;
  };

  // Backwards-compatible alias for the KHR-suffixed name.
  using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo;

  // wrapper struct for struct VkDeviceMemoryOverallocationCreateInfoAMD, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceMemoryOverallocationCreateInfoAMD.html
  struct DeviceMemoryOverallocationCreateInfoAMD
  {
    using NativeType = VkDeviceMemoryOverallocationCreateInfoAMD;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceMemoryOverallocationCreateInfoAMD;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; by default selects the default overallocation behavior and an empty extension chain.
    VULKAN_HPP_CONSTEXPR
      DeviceMemoryOverallocationCreateInfoAMD( MemoryOverallocationBehaviorAMD overallocationBehavior_ = MemoryOverallocationBehaviorAMD::eDefault,
                                               const void *                    pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , overallocationBehavior( overallocationBehavior_ )
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the C struct: wrapper and native type share one layout, so this is a plain member copy.
    DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceMemoryOverallocationCreateInfoAMD( *reinterpret_cast<DeviceMemoryOverallocationCreateInfoAMD const *>( &rhs ) )
    {
    }

    DeviceMemoryOverallocationCreateInfoAMD & operator=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, again relying on the identical layout.
    DeviceMemoryOverallocationCreateInfoAMD & operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      return *this = *reinterpret_cast<DeviceMemoryOverallocationCreateInfoAMD const *>( &rhs );
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, returning *this for fluent use.
    VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD &
      setOverallocationBehavior( MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT
    {
      overallocationBehavior = overallocationBehavior_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit views onto the native type: reference and pointer, const and non-const.
    operator VkDeviceMemoryOverallocationCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceMemoryOverallocationCreateInfoAMD const *>( this );
    }

    operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceMemoryOverallocationCreateInfoAMD *>( this );
    }

    operator VkDeviceMemoryOverallocationCreateInfoAMD const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceMemoryOverallocationCreateInfoAMD const *>( this );
    }

    operator VkDeviceMemoryOverallocationCreateInfoAMD *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceMemoryOverallocationCreateInfoAMD *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: tuple of references over all members, consumed by the generic comparison path.
    std::tuple<StructureType const &, const void * const &, MemoryOverallocationBehaviorAMD const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, overallocationBehavior );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceMemoryOverallocationCreateInfoAMD const & ) const = default;
#else
    // Member-wise equality (pNext compares by pointer value, not by chain contents).
    bool operator==( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return sType == rhs.sType && pNext == rhs.pNext && overallocationBehavior == rhs.overallocationBehavior;
#  endif
    }

    bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType                   sType                  = StructureType::eDeviceMemoryOverallocationCreateInfoAMD;
    const void *                    pNext                  = {};
    MemoryOverallocationBehaviorAMD overallocationBehavior = MemoryOverallocationBehaviorAMD::eDefault;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (only emitted for C++20 and newer).
  template <>
  struct CppType<VkDeviceMemoryOverallocationCreateInfoAMD>
  {
    using Type = DeviceMemoryOverallocationCreateInfoAMD;
  };
#endif

  // Trait mapping the sType enumerant to the wrapper struct that carries it.
  template <>
  struct CppType<StructureType, StructureType::eDeviceMemoryOverallocationCreateInfoAMD>
  {
    using Type = DeviceMemoryOverallocationCreateInfoAMD;
  };

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  // Wrapper union for VkDeviceOrHostAddressConstAMDX: holds either a device address or a read-only host pointer.
  // Which member is active is implied by usage context; the union itself does not track it.
  union DeviceOrHostAddressConstAMDX
  {
    using NativeType = VkDeviceOrHostAddressConstAMDX;
#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )

    // Constructs with the deviceAddress member active (also the default-constructed state).
    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX( DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) {}

    // Constructs with the hostAddress member active.
    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX( const void * hostAddress_ ) : hostAddress( hostAddress_ ) {}
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
    // Chainable setters; assigning a member makes it the active one.
    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX & setDeviceAddress( DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceAddress = deviceAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX & setHostAddress( const void * hostAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      hostAddress = hostAddress_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit views onto the native union; wrapper and native type share one layout.
    operator VkDeviceOrHostAddressConstAMDX const &() const
    {
      return *reinterpret_cast<const VkDeviceOrHostAddressConstAMDX *>( this );
    }

    operator VkDeviceOrHostAddressConstAMDX &()
    {
      return *reinterpret_cast<VkDeviceOrHostAddressConstAMDX *>( this );
    }

#  ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
    DeviceAddress deviceAddress;
    const void *  hostAddress;
#  else
    // Without unrestricted unions, fall back to the raw C handle type for the address member.
    VkDeviceAddress deviceAddress;
    const void *    hostAddress;
#  endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (only emitted for C++20 and newer).
  template <>
  struct CppType<VkDeviceOrHostAddressConstAMDX>
  {
    using Type = DeviceOrHostAddressConstAMDX;
  };
#  endif
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  // wrapper struct for struct VkDevicePipelineBinaryInternalCacheControlKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDevicePipelineBinaryInternalCacheControlKHR.html
  struct DevicePipelineBinaryInternalCacheControlKHR
  {
    using NativeType = VkDevicePipelineBinaryInternalCacheControlKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDevicePipelineBinaryInternalCacheControlKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; defaults leave the internal cache enabled and the extension chain empty.
    VULKAN_HPP_CONSTEXPR DevicePipelineBinaryInternalCacheControlKHR( Bool32 disableInternalCache_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , disableInternalCache( disableInternalCache_ )
    {
    }

    VULKAN_HPP_CONSTEXPR DevicePipelineBinaryInternalCacheControlKHR( DevicePipelineBinaryInternalCacheControlKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the C struct: wrapper and native type share one layout, so this is a plain member copy.
    DevicePipelineBinaryInternalCacheControlKHR( VkDevicePipelineBinaryInternalCacheControlKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DevicePipelineBinaryInternalCacheControlKHR( *reinterpret_cast<DevicePipelineBinaryInternalCacheControlKHR const *>( &rhs ) )
    {
    }

    DevicePipelineBinaryInternalCacheControlKHR & operator=( DevicePipelineBinaryInternalCacheControlKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, again relying on the identical layout.
    DevicePipelineBinaryInternalCacheControlKHR & operator=( VkDevicePipelineBinaryInternalCacheControlKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      return *this = *reinterpret_cast<DevicePipelineBinaryInternalCacheControlKHR const *>( &rhs );
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, returning *this for fluent use.
    VULKAN_HPP_CONSTEXPR_14 DevicePipelineBinaryInternalCacheControlKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DevicePipelineBinaryInternalCacheControlKHR & setDisableInternalCache( Bool32 disableInternalCache_ ) VULKAN_HPP_NOEXCEPT
    {
      disableInternalCache = disableInternalCache_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit views onto the native type: reference and pointer, const and non-const.
    operator VkDevicePipelineBinaryInternalCacheControlKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDevicePipelineBinaryInternalCacheControlKHR const *>( this );
    }

    operator VkDevicePipelineBinaryInternalCacheControlKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDevicePipelineBinaryInternalCacheControlKHR *>( this );
    }

    operator VkDevicePipelineBinaryInternalCacheControlKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDevicePipelineBinaryInternalCacheControlKHR const *>( this );
    }

    operator VkDevicePipelineBinaryInternalCacheControlKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDevicePipelineBinaryInternalCacheControlKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: tuple of references over all members, consumed by the generic comparison path.
    std::tuple<StructureType const &, const void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, disableInternalCache );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DevicePipelineBinaryInternalCacheControlKHR const & ) const = default;
#else
    // Member-wise equality (pNext compares by pointer value, not by chain contents).
    bool operator==( DevicePipelineBinaryInternalCacheControlKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return sType == rhs.sType && pNext == rhs.pNext && disableInternalCache == rhs.disableInternalCache;
#  endif
    }

    bool operator!=( DevicePipelineBinaryInternalCacheControlKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType                = StructureType::eDevicePipelineBinaryInternalCacheControlKHR;
    const void *  pNext                = {};
    Bool32        disableInternalCache = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (only emitted for C++20 and newer).
  template <>
  struct CppType<VkDevicePipelineBinaryInternalCacheControlKHR>
  {
    using Type = DevicePipelineBinaryInternalCacheControlKHR;
  };
#endif

  // Trait mapping the sType enumerant to the wrapper struct that carries it.
  template <>
  struct CppType<StructureType, StructureType::eDevicePipelineBinaryInternalCacheControlKHR>
  {
    using Type = DevicePipelineBinaryInternalCacheControlKHR;
  };

  // wrapper struct for struct VkDevicePrivateDataCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDevicePrivateDataCreateInfo.html
  struct DevicePrivateDataCreateInfo
  {
    using NativeType = VkDevicePrivateDataCreateInfo;

    // Note: this struct may legitimately appear more than once in a pNext chain (allowDuplicate is true).
    static const bool                                  allowDuplicate = true;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDevicePrivateDataCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; defaults request zero private data slots and an empty extension chain.
    VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo( uint32_t privateDataSlotRequestCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , privateDataSlotRequestCount( privateDataSlotRequestCount_ )
    {
    }

    VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo( DevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the C struct: wrapper and native type share one layout, so this is a plain member copy.
    DevicePrivateDataCreateInfo( VkDevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DevicePrivateDataCreateInfo( *reinterpret_cast<DevicePrivateDataCreateInfo const *>( &rhs ) )
    {
    }

    DevicePrivateDataCreateInfo & operator=( DevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, again relying on the identical layout.
    DevicePrivateDataCreateInfo & operator=( VkDevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      return *this = *reinterpret_cast<DevicePrivateDataCreateInfo const *>( &rhs );
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, returning *this for fluent use.
    VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo & setPrivateDataSlotRequestCount( uint32_t privateDataSlotRequestCount_ ) VULKAN_HPP_NOEXCEPT
    {
      privateDataSlotRequestCount = privateDataSlotRequestCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit views onto the native type: reference and pointer, const and non-const.
    operator VkDevicePrivateDataCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDevicePrivateDataCreateInfo const *>( this );
    }

    operator VkDevicePrivateDataCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDevicePrivateDataCreateInfo *>( this );
    }

    operator VkDevicePrivateDataCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDevicePrivateDataCreateInfo const *>( this );
    }

    operator VkDevicePrivateDataCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDevicePrivateDataCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: tuple of references over all members, consumed by the generic comparison path.
    std::tuple<StructureType const &, const void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, privateDataSlotRequestCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DevicePrivateDataCreateInfo const & ) const = default;
#else
    // Member-wise equality (pNext compares by pointer value, not by chain contents).
    bool operator==( DevicePrivateDataCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return sType == rhs.sType && pNext == rhs.pNext && privateDataSlotRequestCount == rhs.privateDataSlotRequestCount;
#  endif
    }

    bool operator!=( DevicePrivateDataCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType                       = StructureType::eDevicePrivateDataCreateInfo;
    const void *  pNext                       = {};
    uint32_t      privateDataSlotRequestCount = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (only emitted for C++20 and newer).
  template <>
  struct CppType<VkDevicePrivateDataCreateInfo>
  {
    using Type = DevicePrivateDataCreateInfo;
  };
#endif

  // Trait mapping the sType enumerant to the wrapper struct that carries it.
  template <>
  struct CppType<StructureType, StructureType::eDevicePrivateDataCreateInfo>
  {
    using Type = DevicePrivateDataCreateInfo;
  };

  // Suffixed alias so code written against the EXT extension name keeps compiling.
  using DevicePrivateDataCreateInfoEXT = DevicePrivateDataCreateInfo;

  // wrapper struct for struct VkDeviceQueueGlobalPriorityCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceQueueGlobalPriorityCreateInfo.html
  struct DeviceQueueGlobalPriorityCreateInfo
  {
    using NativeType = VkDeviceQueueGlobalPriorityCreateInfo;

    // Flags consumed by the structure-chain machinery: at most one occurrence of this sType per chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceQueueGlobalPriorityCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; defaults select the lowest global priority and an empty extension chain.
    VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfo( QueueGlobalPriority globalPriority_ = QueueGlobalPriority::eLow,
                                                              const void *        pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , globalPriority{ globalPriority_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfo( DeviceQueueGlobalPriorityCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the C struct: wrapper and native type share one layout, so this is a plain member copy.
    DeviceQueueGlobalPriorityCreateInfo( VkDeviceQueueGlobalPriorityCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceQueueGlobalPriorityCreateInfo( *reinterpret_cast<DeviceQueueGlobalPriorityCreateInfo const *>( &rhs ) )
    {
    }

    DeviceQueueGlobalPriorityCreateInfo & operator=( DeviceQueueGlobalPriorityCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, relying on the identical layout.
    DeviceQueueGlobalPriorityCreateInfo & operator=( VkDeviceQueueGlobalPriorityCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceQueueGlobalPriorityCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, returning *this for fluent use.
    VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfo & setGlobalPriority( QueueGlobalPriority globalPriority_ ) VULKAN_HPP_NOEXCEPT
    {
      globalPriority = globalPriority_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit views onto the native type: reference and pointer, const and non-const.
    operator VkDeviceQueueGlobalPriorityCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceQueueGlobalPriorityCreateInfo *>( this );
    }

    operator VkDeviceQueueGlobalPriorityCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceQueueGlobalPriorityCreateInfo *>( this );
    }

    operator VkDeviceQueueGlobalPriorityCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceQueueGlobalPriorityCreateInfo *>( this );
    }

    operator VkDeviceQueueGlobalPriorityCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceQueueGlobalPriorityCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: tuple of references over all members, consumed by the generic comparison path.
    std::tuple<StructureType const &, const void * const &, QueueGlobalPriority const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, globalPriority );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceQueueGlobalPriorityCreateInfo const & ) const = default;
#else
    // Member-wise equality (pNext compares by pointer value, not by chain contents).
    bool operator==( DeviceQueueGlobalPriorityCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriority == rhs.globalPriority );
#  endif
    }

    bool operator!=( DeviceQueueGlobalPriorityCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType       sType          = StructureType::eDeviceQueueGlobalPriorityCreateInfo;
    const void *        pNext          = {};
    QueueGlobalPriority globalPriority = QueueGlobalPriority::eLow;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (only emitted for C++20 and newer).
  template <>
  struct CppType<VkDeviceQueueGlobalPriorityCreateInfo>
  {
    using Type = DeviceQueueGlobalPriorityCreateInfo;
  };
#endif

  // Trait mapping the sType enumerant to the wrapper struct that carries it.
  template <>
  struct CppType<StructureType, StructureType::eDeviceQueueGlobalPriorityCreateInfo>
  {
    using Type = DeviceQueueGlobalPriorityCreateInfo;
  };

  // Suffixed aliases so code written against the EXT/KHR extension names keeps compiling.
  using DeviceQueueGlobalPriorityCreateInfoEXT = DeviceQueueGlobalPriorityCreateInfo;
  using DeviceQueueGlobalPriorityCreateInfoKHR = DeviceQueueGlobalPriorityCreateInfo;

  // wrapper struct for struct VkDeviceQueueInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceQueueInfo2.html
  struct DeviceQueueInfo2
  {
    using NativeType = VkDeviceQueueInfo2;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceQueueInfo2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: identifies a device queue by creation flags, queue family index and queue index.
    VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( DeviceQueueCreateFlags flags_            = {},
                                           uint32_t               queueFamilyIndex_ = {},
                                           uint32_t               queueIndex_       = {},
                                           const void *           pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , flags( flags_ )
      , queueFamilyIndex( queueFamilyIndex_ )
      , queueIndex( queueIndex_ )
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the C struct: wrapper and native type share one layout, so this is a plain member copy.
    DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceQueueInfo2( *reinterpret_cast<DeviceQueueInfo2 const *>( &rhs ) ) {}

    DeviceQueueInfo2 & operator=( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, again relying on the identical layout.
    DeviceQueueInfo2 & operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      return *this = *reinterpret_cast<DeviceQueueInfo2 const *>( &rhs );
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, returning *this for fluent use.
    VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setFlags( DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndex = queueFamilyIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setQueueIndex( uint32_t queueIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      queueIndex = queueIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit views onto the native type: reference and pointer, const and non-const.
    operator VkDeviceQueueInfo2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceQueueInfo2 const *>( this );
    }

    operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceQueueInfo2 *>( this );
    }

    operator VkDeviceQueueInfo2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceQueueInfo2 const *>( this );
    }

    operator VkDeviceQueueInfo2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceQueueInfo2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: tuple of references over all members, consumed by the generic comparison path.
    std::tuple<StructureType const &, const void * const &, DeviceQueueCreateFlags const &, uint32_t const &, uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, queueFamilyIndex, queueIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceQueueInfo2 const & ) const = default;
#else
    // Member-wise equality (pNext compares by pointer value, not by chain contents).
    bool operator==( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return sType == rhs.sType && pNext == rhs.pNext && flags == rhs.flags && queueFamilyIndex == rhs.queueFamilyIndex && queueIndex == rhs.queueIndex;
#  endif
    }

    bool operator!=( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType          sType            = StructureType::eDeviceQueueInfo2;
    const void *           pNext            = {};
    DeviceQueueCreateFlags flags            = {};
    uint32_t               queueFamilyIndex = {};
    uint32_t               queueIndex       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (only emitted for C++20 and newer).
  template <>
  struct CppType<VkDeviceQueueInfo2>
  {
    using Type = DeviceQueueInfo2;
  };
#endif

  // Trait mapping the sType enumerant to the wrapper struct that carries it.
  template <>
  struct CppType<StructureType, StructureType::eDeviceQueueInfo2>
  {
    using Type = DeviceQueueInfo2;
  };

  // wrapper struct for struct VkDeviceQueueShaderCoreControlCreateInfoARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceQueueShaderCoreControlCreateInfoARM.html
  struct DeviceQueueShaderCoreControlCreateInfoARM
  {
    using NativeType = VkDeviceQueueShaderCoreControlCreateInfoARM;

    // Flags consumed by the structure-chain machinery: at most one occurrence of this sType per chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceQueueShaderCoreControlCreateInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor. Note: unlike most create-info wrappers here, pNext is a non-const void *,
    // matching the registry declaration of this struct.
    VULKAN_HPP_CONSTEXPR DeviceQueueShaderCoreControlCreateInfoARM( uint32_t shaderCoreCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderCoreCount{ shaderCoreCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceQueueShaderCoreControlCreateInfoARM( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the C struct: wrapper and native type share one layout, so this is a plain member copy.
    DeviceQueueShaderCoreControlCreateInfoARM( VkDeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceQueueShaderCoreControlCreateInfoARM( *reinterpret_cast<DeviceQueueShaderCoreControlCreateInfoARM const *>( &rhs ) )
    {
    }

    DeviceQueueShaderCoreControlCreateInfoARM & operator=( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, relying on the identical layout.
    DeviceQueueShaderCoreControlCreateInfoARM & operator=( VkDeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceQueueShaderCoreControlCreateInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, returning *this for fluent use.
    VULKAN_HPP_CONSTEXPR_14 DeviceQueueShaderCoreControlCreateInfoARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceQueueShaderCoreControlCreateInfoARM & setShaderCoreCount( uint32_t shaderCoreCount_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderCoreCount = shaderCoreCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit views onto the native type: reference and pointer, const and non-const.
    operator VkDeviceQueueShaderCoreControlCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceQueueShaderCoreControlCreateInfoARM *>( this );
    }

    operator VkDeviceQueueShaderCoreControlCreateInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceQueueShaderCoreControlCreateInfoARM *>( this );
    }

    operator VkDeviceQueueShaderCoreControlCreateInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceQueueShaderCoreControlCreateInfoARM *>( this );
    }

    operator VkDeviceQueueShaderCoreControlCreateInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceQueueShaderCoreControlCreateInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: tuple of references over all members, consumed by the generic comparison path.
    std::tuple<StructureType const &, void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderCoreCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceQueueShaderCoreControlCreateInfoARM const & ) const = default;
#else
    // Member-wise equality (pNext compares by pointer value, not by chain contents).
    bool operator==( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreCount == rhs.shaderCoreCount );
#  endif
    }

    bool operator!=( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType           = StructureType::eDeviceQueueShaderCoreControlCreateInfoARM;
    void *        pNext           = {};
    uint32_t      shaderCoreCount = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (only emitted for C++20 and newer).
  template <>
  struct CppType<VkDeviceQueueShaderCoreControlCreateInfoARM>
  {
    using Type = DeviceQueueShaderCoreControlCreateInfoARM;
  };
#endif

  // Trait mapping the sType enumerant to the wrapper struct that carries it.
  template <>
  struct CppType<StructureType, StructureType::eDeviceQueueShaderCoreControlCreateInfoARM>
  {
    using Type = DeviceQueueShaderCoreControlCreateInfoARM;
  };

  // wrapper struct for struct VkTensorDescriptionARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorDescriptionARM.html
  struct TensorDescriptionARM
  {
    using NativeType = VkTensorDescriptionARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eTensorDescriptionARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR TensorDescriptionARM( TensorTilingARM     tiling_         = TensorTilingARM::eOptimal,
                                               Format              format_         = Format::eUndefined,
                                               uint32_t            dimensionCount_ = {},
                                               const int64_t *     pDimensions_    = {},
                                               const int64_t *     pStrides_       = {},
                                               TensorUsageFlagsARM usage_          = {},
                                               const void *        pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , tiling{ tiling_ }
      , format{ format_ }
      , dimensionCount{ dimensionCount_ }
      , pDimensions{ pDimensions_ }
      , pStrides{ pStrides_ }
      , usage{ usage_ }
    {
    }

    VULKAN_HPP_CONSTEXPR TensorDescriptionARM( TensorDescriptionARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    TensorDescriptionARM( VkTensorDescriptionARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : TensorDescriptionARM( *reinterpret_cast<TensorDescriptionARM const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    TensorDescriptionARM( TensorTilingARM                                tiling_,
                          Format                                         format_,
                          ArrayProxyNoTemporaries<const int64_t> const & dimensions_,
                          ArrayProxyNoTemporaries<const int64_t> const & strides_ = {},
                          TensorUsageFlagsARM                            usage_   = {},
                          const void *                                   pNext_   = nullptr )
      : pNext( pNext_ )
      , tiling( tiling_ )
      , format( format_ )
      , dimensionCount( static_cast<uint32_t>( dimensions_.size() ) )
      , pDimensions( dimensions_.data() )
      , pStrides( strides_.data() )
      , usage( usage_ )
    {
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( strides_.empty() || ( dimensions_.size() == strides_.size() ) );
#    else
      if ( !strides_.empty() && ( dimensions_.size() != strides_.size() ) )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                          "::TensorDescriptionARM::TensorDescriptionARM: !strides_.empty() && ( dimensions_.size() != strides_.size() )" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    TensorDescriptionARM & operator=( TensorDescriptionARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    TensorDescriptionARM & operator=( VkTensorDescriptionARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<TensorDescriptionARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM & setTiling( TensorTilingARM tiling_ ) VULKAN_HPP_NOEXCEPT
    {
      tiling = tiling_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM & setFormat( Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM & setDimensionCount( uint32_t dimensionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      dimensionCount = dimensionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM & setPDimensions( const int64_t * pDimensions_ ) VULKAN_HPP_NOEXCEPT
    {
      pDimensions = pDimensions_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets both pDimensions and dimensionCount from the proxy.
    TensorDescriptionARM & setDimensions( ArrayProxyNoTemporaries<const int64_t> const & dimensions_ ) VULKAN_HPP_NOEXCEPT
    {
      dimensionCount = static_cast<uint32_t>( dimensions_.size() );
      pDimensions    = dimensions_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM & setPStrides( const int64_t * pStrides_ ) VULKAN_HPP_NOEXCEPT
    {
      pStrides = pStrides_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter. NOTE: like setDimensions, this also overwrites dimensionCount
    // (with strides_.size()), since both arrays share the single count field.
    TensorDescriptionARM & setStrides( ArrayProxyNoTemporaries<const int64_t> const & strides_ ) VULKAN_HPP_NOEXCEPT
    {
      dimensionCount = static_cast<uint32_t>( strides_.size() );
      pStrides       = strides_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM & setUsage( TensorUsageFlagsARM usage_ ) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (reference and pointer, const and
    // non-const), so the wrapper can be passed directly to the C API. All rely on the
    // wrapper being layout-compatible with VkTensorDescriptionARM.
    operator VkTensorDescriptionARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkTensorDescriptionARM *>( this );
    }

    operator VkTensorDescriptionARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkTensorDescriptionARM *>( this );
    }

    operator VkTensorDescriptionARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkTensorDescriptionARM *>( this );
    }

    operator VkTensorDescriptionARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkTensorDescriptionARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, in declaration order; used for comparison
    // (and hashing) when reflection is enabled.
    std::tuple<StructureType const &,
               const void * const &,
               TensorTilingARM const &,
               Format const &,
               uint32_t const &,
               const int64_t * const &,
               const int64_t * const &,
               TensorUsageFlagsARM const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, tiling, format, dimensionCount, pDimensions, pStrides, usage );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( TensorDescriptionARM const & ) const = default;
#else
    // Shallow member-wise equality: pDimensions/pStrides are compared by pointer value,
    // not by the contents of the arrays they point to.
    bool operator==( TensorDescriptionARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tiling == rhs.tiling ) && ( format == rhs.format ) &&
             ( dimensionCount == rhs.dimensionCount ) && ( pDimensions == rhs.pDimensions ) && ( pStrides == rhs.pStrides ) && ( usage == rhs.usage );
#  endif
    }

    bool operator!=( TensorDescriptionARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkTensorDescriptionARM in declaration order; the reinterpret_cast
    // based conversions above depend on this exact layout.
    StructureType       sType          = StructureType::eTensorDescriptionARM;
    const void *        pNext          = {};
    TensorTilingARM     tiling         = TensorTilingARM::eOptimal;
    Format              format         = Format::eUndefined;
    uint32_t            dimensionCount = {};
    const int64_t *     pDimensions    = {};
    const int64_t *     pStrides       = {};
    TensorUsageFlagsARM usage          = {};
  };

// Trait specializations mapping the native struct (C++20 and up) and its StructureType
// enumerant back to the C++ wrapper type.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkTensorDescriptionARM>
  {
    using Type = TensorDescriptionARM;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eTensorDescriptionARM>
  {
    using Type = TensorDescriptionARM;
  };

  // wrapper struct for struct VkTensorCreateInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorCreateInfoARM.html
  struct TensorCreateInfoARM
  {
    using NativeType = VkTensorCreateInfoARM;

    // This sType may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eTensorCreateInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR TensorCreateInfoARM( TensorCreateFlagsARM         flags_                 = {},
                                              const TensorDescriptionARM * pDescription_          = {},
                                              SharingMode                  sharingMode_           = SharingMode::eExclusive,
                                              uint32_t                     queueFamilyIndexCount_ = {},
                                              const uint32_t *             pQueueFamilyIndices_   = {},
                                              const void *                 pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , pDescription{ pDescription_ }
      , sharingMode{ sharingMode_ }
      , queueFamilyIndexCount{ queueFamilyIndexCount_ }
      , pQueueFamilyIndices{ pQueueFamilyIndices_ }
    {
    }

    VULKAN_HPP_CONSTEXPR TensorCreateInfoARM( TensorCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (layout-compatible; see conversions below).
    TensorCreateInfoARM( VkTensorCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT : TensorCreateInfoARM( *reinterpret_cast<TensorCreateInfoARM const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives queueFamilyIndexCount from the proxy.
    TensorCreateInfoARM( TensorCreateFlagsARM                            flags_,
                         const TensorDescriptionARM *                    pDescription_,
                         SharingMode                                     sharingMode_,
                         ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_,
                         const void *                                    pNext_ = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , pDescription( pDescription_ )
      , sharingMode( sharingMode_ )
      , queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) )
      , pQueueFamilyIndices( queueFamilyIndices_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    TensorCreateInfoARM & operator=( TensorCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper type.
    TensorCreateInfoARM & operator=( VkTensorCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<TensorCreateInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 TensorCreateInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorCreateInfoARM & setFlags( TensorCreateFlagsARM flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorCreateInfoARM & setPDescription( const TensorDescriptionARM * pDescription_ ) VULKAN_HPP_NOEXCEPT
    {
      pDescription = pDescription_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorCreateInfoARM & setSharingMode( SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
    {
      sharingMode = sharingMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorCreateInfoARM & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndexCount = queueFamilyIndexCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorCreateInfoARM & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pQueueFamilyIndices = pQueueFamilyIndices_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets both pointer and count from the proxy.
    TensorCreateInfoARM & setQueueFamilyIndices( ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
      pQueueFamilyIndices   = queueFamilyIndices_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (see layout note on the members).
    operator VkTensorCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkTensorCreateInfoARM *>( this );
    }

    operator VkTensorCreateInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkTensorCreateInfoARM *>( this );
    }

    operator VkTensorCreateInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkTensorCreateInfoARM *>( this );
    }

    operator VkTensorCreateInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkTensorCreateInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               TensorCreateFlagsARM const &,
               const TensorDescriptionARM * const &,
               SharingMode const &,
               uint32_t const &,
               const uint32_t * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, pDescription, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( TensorCreateInfoARM const & ) const = default;
#else
    // Shallow member-wise equality: pointer members are compared by address, not pointee.
    bool operator==( TensorCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pDescription == rhs.pDescription ) &&
             ( sharingMode == rhs.sharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
#  endif
    }

    bool operator!=( TensorCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkTensorCreateInfoARM in declaration order (required for the
    // reinterpret_cast based conversions above).
    StructureType                sType                 = StructureType::eTensorCreateInfoARM;
    const void *                 pNext                 = {};
    TensorCreateFlagsARM         flags                 = {};
    const TensorDescriptionARM * pDescription          = {};
    SharingMode                  sharingMode           = SharingMode::eExclusive;
    uint32_t                     queueFamilyIndexCount = {};
    const uint32_t *             pQueueFamilyIndices   = {};
  };

// Trait specializations mapping the native struct and its StructureType enumerant
// back to the C++ wrapper type.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkTensorCreateInfoARM>
  {
    using Type = TensorCreateInfoARM;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eTensorCreateInfoARM>
  {
    using Type = TensorCreateInfoARM;
  };

  // wrapper struct for struct VkDeviceTensorMemoryRequirementsARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceTensorMemoryRequirementsARM.html
  struct DeviceTensorMemoryRequirementsARM
  {
    using NativeType = VkDeviceTensorMemoryRequirementsARM;

    // This sType may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDeviceTensorMemoryRequirementsARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR DeviceTensorMemoryRequirementsARM( const TensorCreateInfoARM * pCreateInfo_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pCreateInfo{ pCreateInfo_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceTensorMemoryRequirementsARM( DeviceTensorMemoryRequirementsARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (layout-compatible; see conversions below).
    DeviceTensorMemoryRequirementsARM( VkDeviceTensorMemoryRequirementsARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceTensorMemoryRequirementsARM( *reinterpret_cast<DeviceTensorMemoryRequirementsARM const *>( &rhs ) )
    {
    }

    DeviceTensorMemoryRequirementsARM & operator=( DeviceTensorMemoryRequirementsARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper type.
    DeviceTensorMemoryRequirementsARM & operator=( VkDeviceTensorMemoryRequirementsARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DeviceTensorMemoryRequirementsARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 DeviceTensorMemoryRequirementsARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceTensorMemoryRequirementsARM & setPCreateInfo( const TensorCreateInfoARM * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pCreateInfo = pCreateInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (see layout note on the members).
    operator VkDeviceTensorMemoryRequirementsARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceTensorMemoryRequirementsARM *>( this );
    }

    operator VkDeviceTensorMemoryRequirementsARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceTensorMemoryRequirementsARM *>( this );
    }

    operator VkDeviceTensorMemoryRequirementsARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDeviceTensorMemoryRequirementsARM *>( this );
    }

    operator VkDeviceTensorMemoryRequirementsARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDeviceTensorMemoryRequirementsARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, const TensorCreateInfoARM * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pCreateInfo );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceTensorMemoryRequirementsARM const & ) const = default;
#else
    // Shallow member-wise equality: pCreateInfo is compared by address, not pointee.
    bool operator==( DeviceTensorMemoryRequirementsARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo );
#  endif
    }

    bool operator!=( DeviceTensorMemoryRequirementsARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkDeviceTensorMemoryRequirementsARM in declaration order (required
    // for the reinterpret_cast based conversions above).
    StructureType               sType       = StructureType::eDeviceTensorMemoryRequirementsARM;
    const void *                pNext       = {};
    const TensorCreateInfoARM * pCreateInfo = {};
  };

// Trait specializations mapping the native struct and its StructureType enumerant
// back to the C++ wrapper type.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDeviceTensorMemoryRequirementsARM>
  {
    using Type = DeviceTensorMemoryRequirementsARM;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eDeviceTensorMemoryRequirementsARM>
  {
    using Type = DeviceTensorMemoryRequirementsARM;
  };

  // Function-pointer type used by the VK_LUNARG_direct_driver_loading structs below;
  // NOTE(review): signature matches vkGetInstanceProcAddr but takes the C++ wrapper Instance.
  typedef PFN_vkVoidFunction( VKAPI_PTR * PFN_GetInstanceProcAddrLUNARG )( Instance instance, const char * pName );

  // wrapper struct for struct VkDirectDriverLoadingInfoLUNARG, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDirectDriverLoadingInfoLUNARG.html
  struct DirectDriverLoadingInfoLUNARG
  {
    using NativeType = VkDirectDriverLoadingInfoLUNARG;

    // This sType may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDirectDriverLoadingInfoLUNARG;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR DirectDriverLoadingInfoLUNARG( DirectDriverLoadingFlagsLUNARG flags_                  = {},
                                                        PFN_GetInstanceProcAddrLUNARG  pfnGetInstanceProcAddr_ = {},
                                                        void *                         pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , pfnGetInstanceProcAddr{ pfnGetInstanceProcAddr_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DirectDriverLoadingInfoLUNARG( DirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (layout-compatible; see conversions below).
    DirectDriverLoadingInfoLUNARG( VkDirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT
      : DirectDriverLoadingInfoLUNARG( *reinterpret_cast<DirectDriverLoadingInfoLUNARG const *>( &rhs ) )
    {
    }

    DirectDriverLoadingInfoLUNARG & operator=( DirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper type.
    DirectDriverLoadingInfoLUNARG & operator=( VkDirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DirectDriverLoadingInfoLUNARG const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingInfoLUNARG & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingInfoLUNARG & setFlags( DirectDriverLoadingFlagsLUNARG flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingInfoLUNARG &
      setPfnGetInstanceProcAddr( PFN_GetInstanceProcAddrLUNARG pfnGetInstanceProcAddr_ ) VULKAN_HPP_NOEXCEPT
    {
      pfnGetInstanceProcAddr = pfnGetInstanceProcAddr_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (see layout note on the members).
    operator VkDirectDriverLoadingInfoLUNARG const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDirectDriverLoadingInfoLUNARG *>( this );
    }

    operator VkDirectDriverLoadingInfoLUNARG &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDirectDriverLoadingInfoLUNARG *>( this );
    }

    operator VkDirectDriverLoadingInfoLUNARG const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDirectDriverLoadingInfoLUNARG *>( this );
    }

    operator VkDirectDriverLoadingInfoLUNARG *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDirectDriverLoadingInfoLUNARG *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, in declaration order.
    std::tuple<StructureType const &, void * const &, DirectDriverLoadingFlagsLUNARG const &, PFN_GetInstanceProcAddrLUNARG const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, pfnGetInstanceProcAddr );
    }
#endif

    // NOTE(review): unlike most structs in this file, no defaulted operator<=> is offered here,
    // presumably because pfnGetInstanceProcAddr is a function pointer; only equality is provided.
    bool operator==( DirectDriverLoadingInfoLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pfnGetInstanceProcAddr == rhs.pfnGetInstanceProcAddr );
#endif
    }

    bool operator!=( DirectDriverLoadingInfoLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Members mirror VkDirectDriverLoadingInfoLUNARG in declaration order (required for
    // the reinterpret_cast based conversions above).
    StructureType                  sType                  = StructureType::eDirectDriverLoadingInfoLUNARG;
    void *                         pNext                  = {};
    DirectDriverLoadingFlagsLUNARG flags                  = {};
    PFN_GetInstanceProcAddrLUNARG  pfnGetInstanceProcAddr = {};
  };

// Trait specializations mapping the native struct and its StructureType enumerant
// back to the C++ wrapper type.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDirectDriverLoadingInfoLUNARG>
  {
    using Type = DirectDriverLoadingInfoLUNARG;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eDirectDriverLoadingInfoLUNARG>
  {
    using Type = DirectDriverLoadingInfoLUNARG;
  };

  // wrapper struct for struct VkDirectDriverLoadingListLUNARG, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDirectDriverLoadingListLUNARG.html
  struct DirectDriverLoadingListLUNARG
  {
    using NativeType = VkDirectDriverLoadingListLUNARG;

    // This sType may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDirectDriverLoadingListLUNARG;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR DirectDriverLoadingListLUNARG( DirectDriverLoadingModeLUNARG         mode_        = DirectDriverLoadingModeLUNARG::eExclusive,
                                                        uint32_t                              driverCount_ = {},
                                                        const DirectDriverLoadingInfoLUNARG * pDrivers_    = {},
                                                        const void *                          pNext_       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , mode{ mode_ }
      , driverCount{ driverCount_ }
      , pDrivers{ pDrivers_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DirectDriverLoadingListLUNARG( DirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (layout-compatible; see conversions below).
    DirectDriverLoadingListLUNARG( VkDirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT
      : DirectDriverLoadingListLUNARG( *reinterpret_cast<DirectDriverLoadingListLUNARG const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives driverCount from the proxy.
    DirectDriverLoadingListLUNARG( DirectDriverLoadingModeLUNARG                                        mode_,
                                   ArrayProxyNoTemporaries<const DirectDriverLoadingInfoLUNARG> const & drivers_,
                                   const void *                                                         pNext_ = nullptr )
      : pNext( pNext_ ), mode( mode_ ), driverCount( static_cast<uint32_t>( drivers_.size() ) ), pDrivers( drivers_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DirectDriverLoadingListLUNARG & operator=( DirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper type.
    DirectDriverLoadingListLUNARG & operator=( VkDirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DirectDriverLoadingListLUNARG const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & setMode( DirectDriverLoadingModeLUNARG mode_ ) VULKAN_HPP_NOEXCEPT
    {
      mode = mode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & setDriverCount( uint32_t driverCount_ ) VULKAN_HPP_NOEXCEPT
    {
      driverCount = driverCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & setPDrivers( const DirectDriverLoadingInfoLUNARG * pDrivers_ ) VULKAN_HPP_NOEXCEPT
    {
      pDrivers = pDrivers_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets both pDrivers and driverCount from the proxy.
    DirectDriverLoadingListLUNARG & setDrivers( ArrayProxyNoTemporaries<const DirectDriverLoadingInfoLUNARG> const & drivers_ ) VULKAN_HPP_NOEXCEPT
    {
      driverCount = static_cast<uint32_t>( drivers_.size() );
      pDrivers    = drivers_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (see layout note on the members).
    operator VkDirectDriverLoadingListLUNARG const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDirectDriverLoadingListLUNARG *>( this );
    }

    operator VkDirectDriverLoadingListLUNARG &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDirectDriverLoadingListLUNARG *>( this );
    }

    operator VkDirectDriverLoadingListLUNARG const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDirectDriverLoadingListLUNARG *>( this );
    }

    operator VkDirectDriverLoadingListLUNARG *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDirectDriverLoadingListLUNARG *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, in declaration order.
    std::
      tuple<StructureType const &, const void * const &, DirectDriverLoadingModeLUNARG const &, uint32_t const &, const DirectDriverLoadingInfoLUNARG * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, mode, driverCount, pDrivers );
    }
#endif

    // NOTE(review): no defaulted operator<=> here (matching DirectDriverLoadingInfoLUNARG above);
    // equality is shallow — pDrivers is compared by address, not by array contents.
    bool operator==( DirectDriverLoadingListLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mode == rhs.mode ) && ( driverCount == rhs.driverCount ) && ( pDrivers == rhs.pDrivers );
#endif
    }

    bool operator!=( DirectDriverLoadingListLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Members mirror VkDirectDriverLoadingListLUNARG in declaration order (required for
    // the reinterpret_cast based conversions above).
    StructureType                         sType       = StructureType::eDirectDriverLoadingListLUNARG;
    const void *                          pNext       = {};
    DirectDriverLoadingModeLUNARG         mode        = DirectDriverLoadingModeLUNARG::eExclusive;
    uint32_t                              driverCount = {};
    const DirectDriverLoadingInfoLUNARG * pDrivers    = {};
  };

// Trait specializations mapping the native struct and its StructureType enumerant
// back to the C++ wrapper type.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDirectDriverLoadingListLUNARG>
  {
    using Type = DirectDriverLoadingListLUNARG;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eDirectDriverLoadingListLUNARG>
  {
    using Type = DirectDriverLoadingListLUNARG;
  };

// Entire block is compiled only for the DirectFB platform (IDirectFB/IDirectFBSurface
// come from the platform headers).
#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
  // wrapper struct for struct VkDirectFBSurfaceCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDirectFBSurfaceCreateInfoEXT.html
  struct DirectFBSurfaceCreateInfoEXT
  {
    using NativeType = VkDirectFBSurfaceCreateInfoEXT;

    // This sType may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDirectfbSurfaceCreateInfoEXT;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( DirectFBSurfaceCreateFlagsEXT flags_   = {},
                                                       IDirectFB *                   dfb_     = {},
                                                       IDirectFBSurface *            surface_ = {},
                                                       const void *                  pNext_   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , dfb{ dfb_ }
      , surface{ surface_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (layout-compatible; see conversions below).
    DirectFBSurfaceCreateInfoEXT( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DirectFBSurfaceCreateInfoEXT( *reinterpret_cast<DirectFBSurfaceCreateInfoEXT const *>( &rhs ) )
    {
    }

    DirectFBSurfaceCreateInfoEXT & operator=( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper type.
    DirectFBSurfaceCreateInfoEXT & operator=( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DirectFBSurfaceCreateInfoEXT const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setFlags( DirectFBSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setDfb( IDirectFB * dfb_ ) VULKAN_HPP_NOEXCEPT
    {
      dfb = dfb_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setSurface( IDirectFBSurface * surface_ ) VULKAN_HPP_NOEXCEPT
    {
      surface = surface_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (see layout note on the members).
    operator VkDirectFBSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( this );
    }

    operator VkDirectFBSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDirectFBSurfaceCreateInfoEXT *>( this );
    }

    operator VkDirectFBSurfaceCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( this );
    }

    operator VkDirectFBSurfaceCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDirectFBSurfaceCreateInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, DirectFBSurfaceCreateFlagsEXT const &, IDirectFB * const &, IDirectFBSurface * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, dfb, surface );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DirectFBSurfaceCreateInfoEXT const & ) const = default;
#  else
    // Shallow member-wise equality: dfb/surface are compared by address, not pointee.
    bool operator==( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dfb == rhs.dfb ) && ( surface == rhs.surface );
#    endif
    }

    bool operator!=( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror VkDirectFBSurfaceCreateInfoEXT in declaration order (required for
    // the reinterpret_cast based conversions above).
    StructureType                 sType   = StructureType::eDirectfbSurfaceCreateInfoEXT;
    const void *                  pNext   = {};
    DirectFBSurfaceCreateFlagsEXT flags   = {};
    IDirectFB *                   dfb     = {};
    IDirectFBSurface *            surface = {};
  };

// Trait specializations mapping the native struct and its StructureType enumerant
// back to the C++ wrapper type.
#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDirectFBSurfaceCreateInfoEXT>
  {
    using Type = DirectFBSurfaceCreateInfoEXT;
  };
#  endif

  template <>
  struct CppType<StructureType, StructureType::eDirectfbSurfaceCreateInfoEXT>
  {
    using Type = DirectFBSurfaceCreateInfoEXT;
  };
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  // wrapper struct for struct VkDispatchGraphCountInfoAMDX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDispatchGraphCountInfoAMDX.html
  struct DispatchGraphCountInfoAMDX
  {
    using NativeType = VkDispatchGraphCountInfoAMDX;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14
      DispatchGraphCountInfoAMDX( uint32_t count_ = {}, DeviceOrHostAddressConstAMDX infos_ = {}, uint64_t stride_ = {} ) VULKAN_HPP_NOEXCEPT
      : count{ count_ }
      , infos{ infos_ }
      , stride{ stride_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX( DispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DispatchGraphCountInfoAMDX( VkDispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
      : DispatchGraphCountInfoAMDX( *reinterpret_cast<DispatchGraphCountInfoAMDX const *>( &rhs ) )
    {
    }

    DispatchGraphCountInfoAMDX & operator=( DispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    DispatchGraphCountInfoAMDX & operator=( VkDispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DispatchGraphCountInfoAMDX const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX & setCount( uint32_t count_ ) VULKAN_HPP_NOEXCEPT
    {
      count = count_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX & setInfos( DeviceOrHostAddressConstAMDX const & infos_ ) VULKAN_HPP_NOEXCEPT
    {
      infos = infos_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX & setStride( uint64_t stride_ ) VULKAN_HPP_NOEXCEPT
    {
      stride = stride_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkDispatchGraphCountInfoAMDX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( this );
    }

    operator VkDispatchGraphCountInfoAMDX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDispatchGraphCountInfoAMDX *>( this );
    }

    operator VkDispatchGraphCountInfoAMDX const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( this );
    }

    operator VkDispatchGraphCountInfoAMDX *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDispatchGraphCountInfoAMDX *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<uint32_t const &, DeviceOrHostAddressConstAMDX const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( count, infos, stride );
    }
#  endif

  public:
    uint32_t                     count  = {};
    DeviceOrHostAddressConstAMDX infos  = {};
    uint64_t                     stride = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDispatchGraphCountInfoAMDX>
  {
    using Type = DispatchGraphCountInfoAMDX;
  };
#  endif
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  // wrapper struct for struct VkDispatchGraphInfoAMDX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDispatchGraphInfoAMDX.html
  struct DispatchGraphInfoAMDX
  {
    using NativeType = VkDispatchGraphInfoAMDX;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX( uint32_t                     nodeIndex_     = {},
                                                   uint32_t                     payloadCount_  = {},
                                                   DeviceOrHostAddressConstAMDX payloads_      = {},
                                                   uint64_t                     payloadStride_ = {} ) VULKAN_HPP_NOEXCEPT
      : nodeIndex{ nodeIndex_ }
      , payloadCount{ payloadCount_ }
      , payloads{ payloads_ }
      , payloadStride{ payloadStride_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX( DispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DispatchGraphInfoAMDX( VkDispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
      : DispatchGraphInfoAMDX( *reinterpret_cast<DispatchGraphInfoAMDX const *>( &rhs ) )
    {
    }

    DispatchGraphInfoAMDX & operator=( DispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    DispatchGraphInfoAMDX & operator=( VkDispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DispatchGraphInfoAMDX const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setNodeIndex( uint32_t nodeIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      nodeIndex = nodeIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setPayloadCount( uint32_t payloadCount_ ) VULKAN_HPP_NOEXCEPT
    {
      payloadCount = payloadCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setPayloads( DeviceOrHostAddressConstAMDX const & payloads_ ) VULKAN_HPP_NOEXCEPT
    {
      payloads = payloads_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setPayloadStride( uint64_t payloadStride_ ) VULKAN_HPP_NOEXCEPT
    {
      payloadStride = payloadStride_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkDispatchGraphInfoAMDX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDispatchGraphInfoAMDX *>( this );
    }

    operator VkDispatchGraphInfoAMDX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDispatchGraphInfoAMDX *>( this );
    }

    operator VkDispatchGraphInfoAMDX const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDispatchGraphInfoAMDX *>( this );
    }

    operator VkDispatchGraphInfoAMDX *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDispatchGraphInfoAMDX *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<uint32_t const &, uint32_t const &, DeviceOrHostAddressConstAMDX const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( nodeIndex, payloadCount, payloads, payloadStride );
    }
#  endif

  public:
    uint32_t                     nodeIndex     = {};
    uint32_t                     payloadCount  = {};
    DeviceOrHostAddressConstAMDX payloads      = {};
    uint64_t                     payloadStride = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDispatchGraphInfoAMDX>
  {
    using Type = DispatchGraphInfoAMDX;
  };
#  endif
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  // wrapper struct for struct VkDispatchIndirectCommand, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDispatchIndirectCommand.html
  struct DispatchIndirectCommand
  {
    using NativeType = VkDispatchIndirectCommand;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( uint32_t x_ = {}, uint32_t y_ = {}, uint32_t z_ = {} ) VULKAN_HPP_NOEXCEPT
      : x{ x_ }
      , y{ y_ }
      , z{ z_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
      : DispatchIndirectCommand( *reinterpret_cast<DispatchIndirectCommand const *>( &rhs ) )
    {
    }

    DispatchIndirectCommand & operator=( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    DispatchIndirectCommand & operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DispatchIndirectCommand const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setX( uint32_t x_ ) VULKAN_HPP_NOEXCEPT
    {
      x = x_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setY( uint32_t y_ ) VULKAN_HPP_NOEXCEPT
    {
      y = y_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setZ( uint32_t z_ ) VULKAN_HPP_NOEXCEPT
    {
      z = z_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkDispatchIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDispatchIndirectCommand *>( this );
    }

    operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDispatchIndirectCommand *>( this );
    }

    operator VkDispatchIndirectCommand const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDispatchIndirectCommand *>( this );
    }

    operator VkDispatchIndirectCommand *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDispatchIndirectCommand *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( x, y, z );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DispatchIndirectCommand const & ) const = default;
#else
    bool operator==( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z );
#  endif
    }

    bool operator!=( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t x = {};
    uint32_t y = {};
    uint32_t z = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDispatchIndirectCommand>
  {
    using Type = DispatchIndirectCommand;
  };
#endif

  // wrapper struct for struct VkDispatchTileInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDispatchTileInfoQCOM.html
  struct DispatchTileInfoQCOM
  {
    using NativeType = VkDispatchTileInfoQCOM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDispatchTileInfoQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DispatchTileInfoQCOM( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } {}

    VULKAN_HPP_CONSTEXPR DispatchTileInfoQCOM( DispatchTileInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DispatchTileInfoQCOM( VkDispatchTileInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : DispatchTileInfoQCOM( *reinterpret_cast<DispatchTileInfoQCOM const *>( &rhs ) )
    {
    }

    DispatchTileInfoQCOM & operator=( DispatchTileInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    DispatchTileInfoQCOM & operator=( VkDispatchTileInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DispatchTileInfoQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DispatchTileInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkDispatchTileInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDispatchTileInfoQCOM *>( this );
    }

    operator VkDispatchTileInfoQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDispatchTileInfoQCOM *>( this );
    }

    operator VkDispatchTileInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDispatchTileInfoQCOM *>( this );
    }

    operator VkDispatchTileInfoQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDispatchTileInfoQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DispatchTileInfoQCOM const & ) const = default;
#else
    bool operator==( DispatchTileInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
#  endif
    }

    bool operator!=( DispatchTileInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType = StructureType::eDispatchTileInfoQCOM;
    const void *  pNext = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDispatchTileInfoQCOM>
  {
    using Type = DispatchTileInfoQCOM;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eDispatchTileInfoQCOM>
  {
    using Type = DispatchTileInfoQCOM;
  };

  // wrapper struct for struct VkDisplayEventInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayEventInfoEXT.html
  struct DisplayEventInfoEXT
  {
    using NativeType = VkDisplayEventInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDisplayEventInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( DisplayEventTypeEXT displayEvent_ = DisplayEventTypeEXT::eFirstPixelOut,
                                              const void *        pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , displayEvent{ displayEvent_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayEventInfoEXT( *reinterpret_cast<DisplayEventInfoEXT const *>( &rhs ) )
    {
    }

    DisplayEventInfoEXT & operator=( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    DisplayEventInfoEXT & operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DisplayEventInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT & setDisplayEvent( DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT
    {
      displayEvent = displayEvent_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkDisplayEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayEventInfoEXT *>( this );
    }

    operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayEventInfoEXT *>( this );
    }

    operator VkDisplayEventInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDisplayEventInfoEXT *>( this );
    }

    operator VkDisplayEventInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDisplayEventInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, DisplayEventTypeEXT const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, displayEvent );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayEventInfoEXT const & ) const = default;
#else
    bool operator==( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayEvent == rhs.displayEvent );
#  endif
    }

    bool operator!=( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType       sType        = StructureType::eDisplayEventInfoEXT;
    const void *        pNext        = {};
    DisplayEventTypeEXT displayEvent = DisplayEventTypeEXT::eFirstPixelOut;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDisplayEventInfoEXT>
  {
    using Type = DisplayEventInfoEXT;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eDisplayEventInfoEXT>
  {
    using Type = DisplayEventInfoEXT;
  };

  // wrapper struct for struct VkDisplayModeParametersKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayModeParametersKHR.html
  struct DisplayModeParametersKHR
  {
    using NativeType = VkDisplayModeParametersKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( Extent2D visibleRegion_ = {}, uint32_t refreshRate_ = {} ) VULKAN_HPP_NOEXCEPT
      : visibleRegion{ visibleRegion_ }
      , refreshRate{ refreshRate_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayModeParametersKHR( *reinterpret_cast<DisplayModeParametersKHR const *>( &rhs ) )
    {
    }

    DisplayModeParametersKHR & operator=( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    DisplayModeParametersKHR & operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DisplayModeParametersKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR & setVisibleRegion( Extent2D const & visibleRegion_ ) VULKAN_HPP_NOEXCEPT
    {
      visibleRegion = visibleRegion_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR & setRefreshRate( uint32_t refreshRate_ ) VULKAN_HPP_NOEXCEPT
    {
      refreshRate = refreshRate_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkDisplayModeParametersKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayModeParametersKHR *>( this );
    }

    operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayModeParametersKHR *>( this );
    }

    operator VkDisplayModeParametersKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDisplayModeParametersKHR *>( this );
    }

    operator VkDisplayModeParametersKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDisplayModeParametersKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<Extent2D const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( visibleRegion, refreshRate );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayModeParametersKHR const & ) const = default;
#else
    bool operator==( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( visibleRegion == rhs.visibleRegion ) && ( refreshRate == rhs.refreshRate );
#  endif
    }

    bool operator!=( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    Extent2D visibleRegion = {};
    uint32_t refreshRate   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDisplayModeParametersKHR>
  {
    using Type = DisplayModeParametersKHR;
  };
#endif

  // wrapper struct for struct VkDisplayModeCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayModeCreateInfoKHR.html
  struct DisplayModeCreateInfoKHR
  {
    using NativeType = VkDisplayModeCreateInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDisplayModeCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( DisplayModeCreateFlagsKHR flags_      = {},
                                                   DisplayModeParametersKHR  parameters_ = {},
                                                   const void *              pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , parameters{ parameters_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayModeCreateInfoKHR( *reinterpret_cast<DisplayModeCreateInfoKHR const *>( &rhs ) )
    {
    }

    DisplayModeCreateInfoKHR & operator=( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    DisplayModeCreateInfoKHR & operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DisplayModeCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setFlags( DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setParameters( DisplayModeParametersKHR const & parameters_ ) VULKAN_HPP_NOEXCEPT
    {
      parameters = parameters_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkDisplayModeCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( this );
    }

    operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayModeCreateInfoKHR *>( this );
    }

    operator VkDisplayModeCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( this );
    }

    operator VkDisplayModeCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDisplayModeCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, DisplayModeCreateFlagsKHR const &, DisplayModeParametersKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, parameters );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayModeCreateInfoKHR const & ) const = default;
#else
    bool operator==( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( parameters == rhs.parameters );
#  endif
    }

    bool operator!=( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType             sType      = StructureType::eDisplayModeCreateInfoKHR;
    const void *              pNext      = {};
    DisplayModeCreateFlagsKHR flags      = {};
    DisplayModeParametersKHR  parameters = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDisplayModeCreateInfoKHR>
  {
    using Type = DisplayModeCreateInfoKHR;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eDisplayModeCreateInfoKHR>
  {
    using Type = DisplayModeCreateInfoKHR;
  };

  // wrapper struct for struct VkDisplayModePropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayModePropertiesKHR.html
  struct DisplayModePropertiesKHR
  {
    using NativeType = VkDisplayModePropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( DisplayModeKHR displayMode_ = {}, DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT
      : displayMode{ displayMode_ }
      , parameters{ parameters_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayModePropertiesKHR( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayModePropertiesKHR( *reinterpret_cast<DisplayModePropertiesKHR const *>( &rhs ) )
    {
    }

    DisplayModePropertiesKHR & operator=( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    DisplayModePropertiesKHR & operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DisplayModePropertiesKHR const *>( &rhs );
      return *this;
    }

    operator VkDisplayModePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayModePropertiesKHR *>( this );
    }

    operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayModePropertiesKHR *>( this );
    }

    operator VkDisplayModePropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDisplayModePropertiesKHR *>( this );
    }

    operator VkDisplayModePropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDisplayModePropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<DisplayModeKHR const &, DisplayModeParametersKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( displayMode, parameters );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayModePropertiesKHR const & ) const = default;
#else
    bool operator==( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( displayMode == rhs.displayMode ) && ( parameters == rhs.parameters );
#  endif
    }

    bool operator!=( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    DisplayModeKHR           displayMode = {};
    DisplayModeParametersKHR parameters  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDisplayModePropertiesKHR>
  {
    using Type = DisplayModePropertiesKHR;
  };
#endif

  // wrapper struct for struct VkDisplayModeProperties2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayModeProperties2KHR.html
  struct DisplayModeProperties2KHR
  {
    using NativeType = VkDisplayModeProperties2KHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDisplayModeProperties2KHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( DisplayModePropertiesKHR displayModeProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , displayModeProperties{ displayModeProperties_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayModeProperties2KHR( *reinterpret_cast<DisplayModeProperties2KHR const *>( &rhs ) )
    {
    }

    DisplayModeProperties2KHR & operator=( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    DisplayModeProperties2KHR & operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DisplayModeProperties2KHR const *>( &rhs );
      return *this;
    }

    operator VkDisplayModeProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayModeProperties2KHR *>( this );
    }

    operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayModeProperties2KHR *>( this );
    }

    operator VkDisplayModeProperties2KHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDisplayModeProperties2KHR *>( this );
    }

    operator VkDisplayModeProperties2KHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDisplayModeProperties2KHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, DisplayModePropertiesKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, displayModeProperties );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayModeProperties2KHR const & ) const = default;
#else
    bool operator==( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayModeProperties == rhs.displayModeProperties );
#  endif
    }

    bool operator!=( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType            sType                 = StructureType::eDisplayModeProperties2KHR;
    void *                   pNext                 = {};
    DisplayModePropertiesKHR displayModeProperties = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkDisplayModeProperties2KHR>
  {
    using Type = DisplayModeProperties2KHR;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eDisplayModeProperties2KHR>
  {
    using Type = DisplayModeProperties2KHR;
  };

  // wrapper struct for struct VkDisplayModeStereoPropertiesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayModeStereoPropertiesNV.html
  struct DisplayModeStereoPropertiesNV
  {
    using NativeType = VkDisplayModeStereoPropertiesNV;

    // sType value this struct must carry; allowDuplicate indicates whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDisplayModeStereoPropertiesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayModeStereoPropertiesNV( Bool32 hdmi3DSupported_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , hdmi3DSupported{ hdmi3DSupported_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DisplayModeStereoPropertiesNV( DisplayModeStereoPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-compatible with it.
    DisplayModeStereoPropertiesNV( VkDisplayModeStereoPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayModeStereoPropertiesNV( *reinterpret_cast<DisplayModeStereoPropertiesNV const *>( &rhs ) )
    {
    }

    DisplayModeStereoPropertiesNV & operator=( DisplayModeStereoPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (layout-based cast, then defaulted member-wise assignment).
    DisplayModeStereoPropertiesNV & operator=( VkDisplayModeStereoPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DisplayModeStereoPropertiesNV const *>( &rhs );
      return *this;
    }

    // Zero-cost conversions to the native C type, relying on identical memory layout.
    operator VkDisplayModeStereoPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayModeStereoPropertiesNV *>( this );
    }

    operator VkDisplayModeStereoPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayModeStereoPropertiesNV *>( this );
    }

    operator VkDisplayModeStereoPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDisplayModeStereoPropertiesNV *>( this );
    }

    operator VkDisplayModeStereoPropertiesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDisplayModeStereoPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of references for generic, member-wise comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, hdmi3DSupported );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayModeStereoPropertiesNV const & ) const = default;
#else
    bool operator==( DisplayModeStereoPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hdmi3DSupported == rhs.hdmi3DSupported );
#  endif
    }

    bool operator!=( DisplayModeStereoPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkDisplayModeStereoPropertiesNV exactly; do not reorder.
    StructureType sType           = StructureType::eDisplayModeStereoPropertiesNV;
    void *        pNext           = {};
    Bool32        hdmi3DSupported = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper for template-based type lookup.
  template <>
  struct CppType<VkDisplayModeStereoPropertiesNV>
  {
    using Type = DisplayModeStereoPropertiesNV;
  };
#endif

  // Maps the sType enumerant to this C++ wrapper for template-based type lookup.
  template <>
  struct CppType<StructureType, StructureType::eDisplayModeStereoPropertiesNV>
  {
    using Type = DisplayModeStereoPropertiesNV;
  };

  // wrapper struct for struct VkDisplayNativeHdrSurfaceCapabilitiesAMD, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayNativeHdrSurfaceCapabilitiesAMD.html
  struct DisplayNativeHdrSurfaceCapabilitiesAMD
  {
    using NativeType = VkDisplayNativeHdrSurfaceCapabilitiesAMD;

    // sType value this struct must carry; allowDuplicate indicates whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( Bool32 localDimmingSupport_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , localDimmingSupport{ localDimmingSupport_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-compatible with it.
    DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayNativeHdrSurfaceCapabilitiesAMD( *reinterpret_cast<DisplayNativeHdrSurfaceCapabilitiesAMD const *>( &rhs ) )
    {
    }

    DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (layout-based cast, then defaulted member-wise assignment).
    DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DisplayNativeHdrSurfaceCapabilitiesAMD const *>( &rhs );
      return *this;
    }

    // Zero-cost conversions to the native C type, relying on identical memory layout.
    operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayNativeHdrSurfaceCapabilitiesAMD *>( this );
    }

    operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayNativeHdrSurfaceCapabilitiesAMD *>( this );
    }

    operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDisplayNativeHdrSurfaceCapabilitiesAMD *>( this );
    }

    operator VkDisplayNativeHdrSurfaceCapabilitiesAMD *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDisplayNativeHdrSurfaceCapabilitiesAMD *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of references for generic, member-wise comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, localDimmingSupport );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayNativeHdrSurfaceCapabilitiesAMD const & ) const = default;
#else
    bool operator==( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( localDimmingSupport == rhs.localDimmingSupport );
#  endif
    }

    bool operator!=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkDisplayNativeHdrSurfaceCapabilitiesAMD exactly; do not reorder.
    StructureType sType               = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;
    void *        pNext               = {};
    Bool32        localDimmingSupport = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper for template-based type lookup.
  template <>
  struct CppType<VkDisplayNativeHdrSurfaceCapabilitiesAMD>
  {
    using Type = DisplayNativeHdrSurfaceCapabilitiesAMD;
  };
#endif

  // Maps the sType enumerant to this C++ wrapper for template-based type lookup.
  template <>
  struct CppType<StructureType, StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD>
  {
    using Type = DisplayNativeHdrSurfaceCapabilitiesAMD;
  };

  // wrapper struct for struct VkDisplayPlaneCapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPlaneCapabilitiesKHR.html
  // Note: this is a plain output struct — it carries no sType/pNext members.
  struct DisplayPlaneCapabilitiesKHR
  {
    using NativeType = VkDisplayPlaneCapabilitiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {},
                                                      Offset2D                  minSrcPosition_ = {},
                                                      Offset2D                  maxSrcPosition_ = {},
                                                      Extent2D                  minSrcExtent_   = {},
                                                      Extent2D                  maxSrcExtent_   = {},
                                                      Offset2D                  minDstPosition_ = {},
                                                      Offset2D                  maxDstPosition_ = {},
                                                      Extent2D                  minDstExtent_   = {},
                                                      Extent2D                  maxDstExtent_   = {} ) VULKAN_HPP_NOEXCEPT
      : supportedAlpha{ supportedAlpha_ }
      , minSrcPosition{ minSrcPosition_ }
      , maxSrcPosition{ maxSrcPosition_ }
      , minSrcExtent{ minSrcExtent_ }
      , maxSrcExtent{ maxSrcExtent_ }
      , minDstPosition{ minDstPosition_ }
      , maxDstPosition{ maxDstPosition_ }
      , minDstExtent{ minDstExtent_ }
      , maxDstExtent{ maxDstExtent_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-compatible with it.
    DisplayPlaneCapabilitiesKHR( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayPlaneCapabilitiesKHR( *reinterpret_cast<DisplayPlaneCapabilitiesKHR const *>( &rhs ) )
    {
    }

    DisplayPlaneCapabilitiesKHR & operator=( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (layout-based cast, then defaulted member-wise assignment).
    DisplayPlaneCapabilitiesKHR & operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DisplayPlaneCapabilitiesKHR const *>( &rhs );
      return *this;
    }

    // Zero-cost conversions to the native C type, relying on identical memory layout.
    operator VkDisplayPlaneCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR *>( this );
    }

    operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( this );
    }

    operator VkDisplayPlaneCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR *>( this );
    }

    operator VkDisplayPlaneCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of references for generic, member-wise comparison.
    std::tuple<DisplayPlaneAlphaFlagsKHR const &,
               Offset2D const &,
               Offset2D const &,
               Extent2D const &,
               Extent2D const &,
               Offset2D const &,
               Offset2D const &,
               Extent2D const &,
               Extent2D const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( supportedAlpha, minSrcPosition, maxSrcPosition, minSrcExtent, maxSrcExtent, minDstPosition, maxDstPosition, minDstExtent, maxDstExtent );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayPlaneCapabilitiesKHR const & ) const = default;
#else
    bool operator==( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( supportedAlpha == rhs.supportedAlpha ) && ( minSrcPosition == rhs.minSrcPosition ) && ( maxSrcPosition == rhs.maxSrcPosition ) &&
             ( minSrcExtent == rhs.minSrcExtent ) && ( maxSrcExtent == rhs.maxSrcExtent ) && ( minDstPosition == rhs.minDstPosition ) &&
             ( maxDstPosition == rhs.maxDstPosition ) && ( minDstExtent == rhs.minDstExtent ) && ( maxDstExtent == rhs.maxDstExtent );
#  endif
    }

    bool operator!=( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkDisplayPlaneCapabilitiesKHR exactly; do not reorder.
    DisplayPlaneAlphaFlagsKHR supportedAlpha = {};
    Offset2D                  minSrcPosition = {};
    Offset2D                  maxSrcPosition = {};
    Extent2D                  minSrcExtent   = {};
    Extent2D                  maxSrcExtent   = {};
    Offset2D                  minDstPosition = {};
    Offset2D                  maxDstPosition = {};
    Extent2D                  minDstExtent   = {};
    Extent2D                  maxDstExtent   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper for template-based type lookup.
  template <>
  struct CppType<VkDisplayPlaneCapabilitiesKHR>
  {
    using Type = DisplayPlaneCapabilitiesKHR;
  };
#endif

  // wrapper struct for struct VkDisplayPlaneCapabilities2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPlaneCapabilities2KHR.html
  struct DisplayPlaneCapabilities2KHR
  {
    using NativeType = VkDisplayPlaneCapabilities2KHR;

    // sType value this struct must carry; allowDuplicate indicates whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDisplayPlaneCapabilities2KHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( DisplayPlaneCapabilitiesKHR capabilities_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , capabilities{ capabilities_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-compatible with it.
    DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayPlaneCapabilities2KHR( *reinterpret_cast<DisplayPlaneCapabilities2KHR const *>( &rhs ) )
    {
    }

    DisplayPlaneCapabilities2KHR & operator=( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (layout-based cast, then defaulted member-wise assignment).
    DisplayPlaneCapabilities2KHR & operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DisplayPlaneCapabilities2KHR const *>( &rhs );
      return *this;
    }

    // Zero-cost conversions to the native C type, relying on identical memory layout.
    operator VkDisplayPlaneCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayPlaneCapabilities2KHR *>( this );
    }

    operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( this );
    }

    operator VkDisplayPlaneCapabilities2KHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDisplayPlaneCapabilities2KHR *>( this );
    }

    operator VkDisplayPlaneCapabilities2KHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of references for generic, member-wise comparison.
    std::tuple<StructureType const &, void * const &, DisplayPlaneCapabilitiesKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, capabilities );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayPlaneCapabilities2KHR const & ) const = default;
#else
    bool operator==( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( capabilities == rhs.capabilities );
#  endif
    }

    bool operator!=( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkDisplayPlaneCapabilities2KHR exactly; do not reorder.
    StructureType               sType        = StructureType::eDisplayPlaneCapabilities2KHR;
    void *                      pNext        = {};
    DisplayPlaneCapabilitiesKHR capabilities = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper for template-based type lookup.
  template <>
  struct CppType<VkDisplayPlaneCapabilities2KHR>
  {
    using Type = DisplayPlaneCapabilities2KHR;
  };
#endif

  // Maps the sType enumerant to this C++ wrapper for template-based type lookup.
  template <>
  struct CppType<StructureType, StructureType::eDisplayPlaneCapabilities2KHR>
  {
    using Type = DisplayPlaneCapabilities2KHR;
  };

  // wrapper struct for struct VkDisplayPlaneInfo2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPlaneInfo2KHR.html
  struct DisplayPlaneInfo2KHR
  {
    using NativeType = VkDisplayPlaneInfo2KHR;

    // sType value this struct must carry; allowDuplicate indicates whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDisplayPlaneInfo2KHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( DisplayModeKHR mode_ = {}, uint32_t planeIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , mode{ mode_ }
      , planeIndex{ planeIndex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-compatible with it.
    DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayPlaneInfo2KHR( *reinterpret_cast<DisplayPlaneInfo2KHR const *>( &rhs ) )
    {
    }

    DisplayPlaneInfo2KHR & operator=( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (layout-based cast, then defaulted member-wise assignment).
    DisplayPlaneInfo2KHR & operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DisplayPlaneInfo2KHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style); each returns *this.
    VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setMode( DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
    {
      mode = mode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      planeIndex = planeIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C type, relying on identical memory layout.
    operator VkDisplayPlaneInfo2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( this );
    }

    operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayPlaneInfo2KHR *>( this );
    }

    operator VkDisplayPlaneInfo2KHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( this );
    }

    operator VkDisplayPlaneInfo2KHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDisplayPlaneInfo2KHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of references for generic, member-wise comparison.
    std::tuple<StructureType const &, const void * const &, DisplayModeKHR const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, mode, planeIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayPlaneInfo2KHR const & ) const = default;
#else
    bool operator==( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mode == rhs.mode ) && ( planeIndex == rhs.planeIndex );
#  endif
    }

    bool operator!=( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkDisplayPlaneInfo2KHR exactly; do not reorder.
    StructureType  sType      = StructureType::eDisplayPlaneInfo2KHR;
    const void *   pNext      = {};
    DisplayModeKHR mode       = {};
    uint32_t       planeIndex = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper for template-based type lookup.
  template <>
  struct CppType<VkDisplayPlaneInfo2KHR>
  {
    using Type = DisplayPlaneInfo2KHR;
  };
#endif

  // Maps the sType enumerant to this C++ wrapper for template-based type lookup.
  template <>
  struct CppType<StructureType, StructureType::eDisplayPlaneInfo2KHR>
  {
    using Type = DisplayPlaneInfo2KHR;
  };

  // wrapper struct for struct VkDisplayPlanePropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPlanePropertiesKHR.html
  // Note: this is a plain output struct — it carries no sType/pNext members.
  struct DisplayPlanePropertiesKHR
  {
    using NativeType = VkDisplayPlanePropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( DisplayKHR currentDisplay_ = {}, uint32_t currentStackIndex_ = {} ) VULKAN_HPP_NOEXCEPT
      : currentDisplay{ currentDisplay_ }
      , currentStackIndex{ currentStackIndex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-compatible with it.
    DisplayPlanePropertiesKHR( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayPlanePropertiesKHR( *reinterpret_cast<DisplayPlanePropertiesKHR const *>( &rhs ) )
    {
    }

    DisplayPlanePropertiesKHR & operator=( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (layout-based cast, then defaulted member-wise assignment).
    DisplayPlanePropertiesKHR & operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DisplayPlanePropertiesKHR const *>( &rhs );
      return *this;
    }

    // Zero-cost conversions to the native C type, relying on identical memory layout.
    operator VkDisplayPlanePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayPlanePropertiesKHR *>( this );
    }

    operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayPlanePropertiesKHR *>( this );
    }

    operator VkDisplayPlanePropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDisplayPlanePropertiesKHR *>( this );
    }

    operator VkDisplayPlanePropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDisplayPlanePropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of references for generic, member-wise comparison.
    std::tuple<DisplayKHR const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( currentDisplay, currentStackIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayPlanePropertiesKHR const & ) const = default;
#else
    bool operator==( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( currentDisplay == rhs.currentDisplay ) && ( currentStackIndex == rhs.currentStackIndex );
#  endif
    }

    bool operator!=( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkDisplayPlanePropertiesKHR exactly; do not reorder.
    DisplayKHR currentDisplay    = {};
    uint32_t   currentStackIndex = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper for template-based type lookup.
  template <>
  struct CppType<VkDisplayPlanePropertiesKHR>
  {
    using Type = DisplayPlanePropertiesKHR;
  };
#endif

  // wrapper struct for struct VkDisplayPlaneProperties2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPlaneProperties2KHR.html
  struct DisplayPlaneProperties2KHR
  {
    using NativeType = VkDisplayPlaneProperties2KHR;

    // sType value this struct must carry; allowDuplicate indicates whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDisplayPlaneProperties2KHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( DisplayPlanePropertiesKHR displayPlaneProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , displayPlaneProperties{ displayPlaneProperties_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-compatible with it.
    DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayPlaneProperties2KHR( *reinterpret_cast<DisplayPlaneProperties2KHR const *>( &rhs ) )
    {
    }

    DisplayPlaneProperties2KHR & operator=( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (layout-based cast, then defaulted member-wise assignment).
    DisplayPlaneProperties2KHR & operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DisplayPlaneProperties2KHR const *>( &rhs );
      return *this;
    }

    // Zero-cost conversions to the native C type, relying on identical memory layout.
    operator VkDisplayPlaneProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayPlaneProperties2KHR *>( this );
    }

    operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayPlaneProperties2KHR *>( this );
    }

    operator VkDisplayPlaneProperties2KHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDisplayPlaneProperties2KHR *>( this );
    }

    operator VkDisplayPlaneProperties2KHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDisplayPlaneProperties2KHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of references for generic, member-wise comparison.
    std::tuple<StructureType const &, void * const &, DisplayPlanePropertiesKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, displayPlaneProperties );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayPlaneProperties2KHR const & ) const = default;
#else
    bool operator==( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayPlaneProperties == rhs.displayPlaneProperties );
#  endif
    }

    bool operator!=( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkDisplayPlaneProperties2KHR exactly; do not reorder.
    StructureType             sType                  = StructureType::eDisplayPlaneProperties2KHR;
    void *                    pNext                  = {};
    DisplayPlanePropertiesKHR displayPlaneProperties = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper for template-based type lookup.
  template <>
  struct CppType<VkDisplayPlaneProperties2KHR>
  {
    using Type = DisplayPlaneProperties2KHR;
  };
#endif

  // Maps the sType enumerant to this C++ wrapper for template-based type lookup.
  template <>
  struct CppType<StructureType, StructureType::eDisplayPlaneProperties2KHR>
  {
    using Type = DisplayPlaneProperties2KHR;
  };

  // wrapper struct for struct VkDisplayPowerInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPowerInfoEXT.html
  struct DisplayPowerInfoEXT
  {
    using NativeType = VkDisplayPowerInfoEXT;

    // sType value this struct must carry; allowDuplicate indicates whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDisplayPowerInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( DisplayPowerStateEXT powerState_ = DisplayPowerStateEXT::eOff, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , powerState{ powerState_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-compatible with it.
    DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayPowerInfoEXT( *reinterpret_cast<DisplayPowerInfoEXT const *>( &rhs ) )
    {
    }

    DisplayPowerInfoEXT & operator=( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (layout-based cast, then defaulted member-wise assignment).
    DisplayPowerInfoEXT & operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DisplayPowerInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style); each returns *this.
    VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & setPowerState( DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT
    {
      powerState = powerState_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C type, relying on identical memory layout.
    operator VkDisplayPowerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayPowerInfoEXT *>( this );
    }

    operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayPowerInfoEXT *>( this );
    }

    operator VkDisplayPowerInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDisplayPowerInfoEXT *>( this );
    }

    operator VkDisplayPowerInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDisplayPowerInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of references for generic, member-wise comparison.
    std::tuple<StructureType const &, const void * const &, DisplayPowerStateEXT const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, powerState );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayPowerInfoEXT const & ) const = default;
#else
    bool operator==( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( powerState == rhs.powerState );
#  endif
    }

    bool operator!=( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkDisplayPowerInfoEXT exactly; do not reorder.
    StructureType        sType      = StructureType::eDisplayPowerInfoEXT;
    const void *         pNext      = {};
    DisplayPowerStateEXT powerState = DisplayPowerStateEXT::eOff;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper for template-based type lookup.
  template <>
  struct CppType<VkDisplayPowerInfoEXT>
  {
    using Type = DisplayPowerInfoEXT;
  };
#endif

  // Maps the sType enumerant to this C++ wrapper for template-based type lookup.
  template <>
  struct CppType<StructureType, StructureType::eDisplayPowerInfoEXT>
  {
    using Type = DisplayPowerInfoEXT;
  };

  // wrapper struct for struct VkDisplayPresentInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPresentInfoKHR.html
  struct DisplayPresentInfoKHR
  {
    using NativeType = VkDisplayPresentInfoKHR;

    // Generated chain metadata: the sType value for this struct; duplicate use in a pNext chain is not allowed.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDisplayPresentInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by the member initializer on the sType member.
    VULKAN_HPP_CONSTEXPR
      DisplayPresentInfoKHR( Rect2D srcRect_ = {}, Rect2D dstRect_ = {}, Bool32 persistent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcRect{ srcRect_ }
      , dstRect{ dstRect_ }
      , persistent{ persistent_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native Vulkan struct; relies on the wrapper being layout-compatible with VkDisplayPresentInfoKHR.
    DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayPresentInfoKHR( *reinterpret_cast<DisplayPresentInfoKHR const *>( &rhs ) )
    {
    }

    DisplayPresentInfoKHR & operator=( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native Vulkan struct (same layout-compatibility assumption as above).
    DisplayPresentInfoKHR & operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DisplayPresentInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member; each returns *this for fluent use.
    VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setSrcRect( Rect2D const & srcRect_ ) VULKAN_HPP_NOEXCEPT
    {
      srcRect = srcRect_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setDstRect( Rect2D const & dstRect_ ) VULKAN_HPP_NOEXCEPT
    {
      dstRect = dstRect_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setPersistent( Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT
    {
      persistent = persistent_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native struct, by reference and by pointer.
    operator VkDisplayPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayPresentInfoKHR *>( this );
    }

    operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayPresentInfoKHR *>( this );
    }

    operator VkDisplayPresentInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDisplayPresentInfoKHR *>( this );
    }

    operator VkDisplayPresentInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDisplayPresentInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic code.
    std::tuple<StructureType const &, const void * const &, Rect2D const &, Rect2D const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcRect, dstRect, persistent );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayPresentInfoKHR const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcRect == rhs.srcRect ) && ( dstRect == rhs.dstRect ) && ( persistent == rhs.persistent );
#  endif
    }

    bool operator!=( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType      = StructureType::eDisplayPresentInfoKHR;
    const void *  pNext      = {};
    Rect2D        srcRect    = {};
    Rect2D        dstRect    = {};
    Bool32        persistent = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native struct to its C++ wrapper type (specialization only available from C++20 on).
  template <>
  struct CppType<VkDisplayPresentInfoKHR>
  {
    using Type = DisplayPresentInfoKHR;
  };
#endif

  // Maps StructureType::eDisplayPresentInfoKHR back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDisplayPresentInfoKHR>
  {
    using Type = DisplayPresentInfoKHR;
  };

  // wrapper struct for struct VkDisplayPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPropertiesKHR.html
  struct DisplayPropertiesKHR
  {
    using NativeType = VkDisplayPropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( DisplayKHR               display_              = {},
                                               const char *             displayName_          = {},
                                               Extent2D                 physicalDimensions_   = {},
                                               Extent2D                 physicalResolution_   = {},
                                               SurfaceTransformFlagsKHR supportedTransforms_  = {},
                                               Bool32                   planeReorderPossible_ = {},
                                               Bool32                   persistentContent_    = {} ) VULKAN_HPP_NOEXCEPT
      : display{ display_ }
      , displayName{ displayName_ }
      , physicalDimensions{ physicalDimensions_ }
      , physicalResolution{ physicalResolution_ }
      , supportedTransforms{ supportedTransforms_ }
      , planeReorderPossible{ planeReorderPossible_ }
      , persistentContent{ persistentContent_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayPropertiesKHR( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayPropertiesKHR( *reinterpret_cast<DisplayPropertiesKHR const *>( &rhs ) )
    {
    }

    DisplayPropertiesKHR & operator=( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    DisplayPropertiesKHR & operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DisplayPropertiesKHR const *>( &rhs );
      return *this;
    }

    operator VkDisplayPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayPropertiesKHR *>( this );
    }

    operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayPropertiesKHR *>( this );
    }

    operator VkDisplayPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDisplayPropertiesKHR *>( this );
    }

    operator VkDisplayPropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDisplayPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<DisplayKHR const &, const char * const &, Extent2D const &, Extent2D const &, SurfaceTransformFlagsKHR const &, Bool32 const &, Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( display, displayName, physicalDimensions, physicalResolution, supportedTransforms, planeReorderPossible, persistentContent );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    std::strong_ordering operator<=>( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = display <=> rhs.display; cmp != 0 )
        return cmp;
      if ( displayName != rhs.displayName )
        if ( auto cmp = strcmp( displayName, rhs.displayName ); cmp != 0 )
          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = physicalDimensions <=> rhs.physicalDimensions; cmp != 0 )
        return cmp;
      if ( auto cmp = physicalResolution <=> rhs.physicalResolution; cmp != 0 )
        return cmp;
      if ( auto cmp = supportedTransforms <=> rhs.supportedTransforms; cmp != 0 )
        return cmp;
      if ( auto cmp = planeReorderPossible <=> rhs.planeReorderPossible; cmp != 0 )
        return cmp;
      if ( auto cmp = persistentContent <=> rhs.persistentContent; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    bool operator==( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( display == rhs.display ) && ( ( displayName == rhs.displayName ) || ( strcmp( displayName, rhs.displayName ) == 0 ) ) &&
             ( physicalDimensions == rhs.physicalDimensions ) && ( physicalResolution == rhs.physicalResolution ) &&
             ( supportedTransforms == rhs.supportedTransforms ) && ( planeReorderPossible == rhs.planeReorderPossible ) &&
             ( persistentContent == rhs.persistentContent );
    }

    bool operator!=( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    DisplayKHR               display              = {};
    const char *             displayName          = {};
    Extent2D                 physicalDimensions   = {};
    Extent2D                 physicalResolution   = {};
    SurfaceTransformFlagsKHR supportedTransforms  = {};
    Bool32                   planeReorderPossible = {};
    Bool32                   persistentContent    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native struct to its C++ wrapper type (specialization only available from C++20 on).
  template <>
  struct CppType<VkDisplayPropertiesKHR>
  {
    using Type = DisplayPropertiesKHR;
  };
#endif

  // wrapper struct for struct VkDisplayProperties2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayProperties2KHR.html
  struct DisplayProperties2KHR
  {
    using NativeType = VkDisplayProperties2KHR;

    // Generated chain metadata: the sType value for this struct; duplicate use in a pNext chain is not allowed.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDisplayProperties2KHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by the member initializer on the sType member.
    VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( DisplayPropertiesKHR displayProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , displayProperties{ displayProperties_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native struct; relies on identical layout of wrapper and native type.
    DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayProperties2KHR( *reinterpret_cast<DisplayProperties2KHR const *>( &rhs ) )
    {
    }

    DisplayProperties2KHR & operator=( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native struct (same layout assumption).
    DisplayProperties2KHR & operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DisplayProperties2KHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native struct, by reference and by pointer.
    operator VkDisplayProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayProperties2KHR *>( this );
    }

    operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayProperties2KHR *>( this );
    }

    operator VkDisplayProperties2KHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDisplayProperties2KHR *>( this );
    }

    operator VkDisplayProperties2KHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDisplayProperties2KHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic code.
    std::tuple<StructureType const &, void * const &, DisplayPropertiesKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, displayProperties );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayProperties2KHR const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayProperties == rhs.displayProperties );
#  endif
    }

    bool operator!=( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType        sType             = StructureType::eDisplayProperties2KHR;
    void *               pNext             = {};
    DisplayPropertiesKHR displayProperties = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native struct to its C++ wrapper type (specialization only available from C++20 on).
  template <>
  struct CppType<VkDisplayProperties2KHR>
  {
    using Type = DisplayProperties2KHR;
  };
#endif

  // Maps StructureType::eDisplayProperties2KHR back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDisplayProperties2KHR>
  {
    using Type = DisplayProperties2KHR;
  };

  // wrapper struct for struct VkDisplaySurfaceCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplaySurfaceCreateInfoKHR.html
  struct DisplaySurfaceCreateInfoKHR
  {
    using NativeType = VkDisplaySurfaceCreateInfoKHR;

    // Generated chain metadata: the sType value for this struct; duplicate use in a pNext chain is not allowed.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDisplaySurfaceCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by the member initializer on the sType member.
    VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateFlagsKHR flags_           = {},
                                                      DisplayModeKHR               displayMode_     = {},
                                                      uint32_t                     planeIndex_      = {},
                                                      uint32_t                     planeStackIndex_ = {},
                                                      SurfaceTransformFlagBitsKHR  transform_       = SurfaceTransformFlagBitsKHR::eIdentity,
                                                      float                        globalAlpha_     = {},
                                                      DisplayPlaneAlphaFlagBitsKHR alphaMode_       = DisplayPlaneAlphaFlagBitsKHR::eOpaque,
                                                      Extent2D                     imageExtent_     = {},
                                                      const void *                 pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , displayMode{ displayMode_ }
      , planeIndex{ planeIndex_ }
      , planeStackIndex{ planeStackIndex_ }
      , transform{ transform_ }
      , globalAlpha{ globalAlpha_ }
      , alphaMode{ alphaMode_ }
      , imageExtent{ imageExtent_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native struct; relies on identical layout of wrapper and native type.
    DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplaySurfaceCreateInfoKHR( *reinterpret_cast<DisplaySurfaceCreateInfoKHR const *>( &rhs ) )
    {
    }

    DisplaySurfaceCreateInfoKHR & operator=( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native struct (same layout assumption).
    DisplaySurfaceCreateInfoKHR & operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DisplaySurfaceCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member; each returns *this for fluent use.
    VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setFlags( DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setDisplayMode( DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT
    {
      displayMode = displayMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      planeIndex = planeIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPlaneStackIndex( uint32_t planeStackIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      planeStackIndex = planeStackIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setTransform( SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
    {
      transform = transform_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setGlobalAlpha( float globalAlpha_ ) VULKAN_HPP_NOEXCEPT
    {
      globalAlpha = globalAlpha_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setAlphaMode( DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT
    {
      alphaMode = alphaMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setImageExtent( Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
    {
      imageExtent = imageExtent_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native struct, by reference and by pointer.
    operator VkDisplaySurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( this );
    }

    operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplaySurfaceCreateInfoKHR *>( this );
    }

    operator VkDisplaySurfaceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( this );
    }

    operator VkDisplaySurfaceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDisplaySurfaceCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic code.
    std::tuple<StructureType const &,
               const void * const &,
               DisplaySurfaceCreateFlagsKHR const &,
               DisplayModeKHR const &,
               uint32_t const &,
               uint32_t const &,
               SurfaceTransformFlagBitsKHR const &,
               float const &,
               DisplayPlaneAlphaFlagBitsKHR const &,
               Extent2D const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, displayMode, planeIndex, planeStackIndex, transform, globalAlpha, alphaMode, imageExtent );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplaySurfaceCreateInfoKHR const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( displayMode == rhs.displayMode ) &&
             ( planeIndex == rhs.planeIndex ) && ( planeStackIndex == rhs.planeStackIndex ) && ( transform == rhs.transform ) &&
             ( globalAlpha == rhs.globalAlpha ) && ( alphaMode == rhs.alphaMode ) && ( imageExtent == rhs.imageExtent );
#  endif
    }

    bool operator!=( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                sType           = StructureType::eDisplaySurfaceCreateInfoKHR;
    const void *                 pNext           = {};
    DisplaySurfaceCreateFlagsKHR flags           = {};
    DisplayModeKHR               displayMode     = {};
    uint32_t                     planeIndex      = {};
    uint32_t                     planeStackIndex = {};
    SurfaceTransformFlagBitsKHR  transform       = SurfaceTransformFlagBitsKHR::eIdentity;
    float                        globalAlpha     = {};
    DisplayPlaneAlphaFlagBitsKHR alphaMode       = DisplayPlaneAlphaFlagBitsKHR::eOpaque;
    Extent2D                     imageExtent     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native struct to its C++ wrapper type (specialization only available from C++20 on).
  template <>
  struct CppType<VkDisplaySurfaceCreateInfoKHR>
  {
    using Type = DisplaySurfaceCreateInfoKHR;
  };
#endif

  // Maps StructureType::eDisplaySurfaceCreateInfoKHR back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDisplaySurfaceCreateInfoKHR>
  {
    using Type = DisplaySurfaceCreateInfoKHR;
  };

  // wrapper struct for struct VkDisplaySurfaceStereoCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplaySurfaceStereoCreateInfoNV.html
  struct DisplaySurfaceStereoCreateInfoNV
  {
    using NativeType = VkDisplaySurfaceStereoCreateInfoNV;

    // Generated chain metadata: the sType value for this struct; duplicate use in a pNext chain is not allowed.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDisplaySurfaceStereoCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by the member initializer on the sType member.
    VULKAN_HPP_CONSTEXPR DisplaySurfaceStereoCreateInfoNV( DisplaySurfaceStereoTypeNV stereoType_ = DisplaySurfaceStereoTypeNV::eNone,
                                                           const void *               pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stereoType{ stereoType_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DisplaySurfaceStereoCreateInfoNV( DisplaySurfaceStereoCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native struct; relies on identical layout of wrapper and native type.
    DisplaySurfaceStereoCreateInfoNV( VkDisplaySurfaceStereoCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplaySurfaceStereoCreateInfoNV( *reinterpret_cast<DisplaySurfaceStereoCreateInfoNV const *>( &rhs ) )
    {
    }

    DisplaySurfaceStereoCreateInfoNV & operator=( DisplaySurfaceStereoCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native struct (same layout assumption).
    DisplaySurfaceStereoCreateInfoNV & operator=( VkDisplaySurfaceStereoCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DisplaySurfaceStereoCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member; each returns *this for fluent use.
    VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceStereoCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceStereoCreateInfoNV & setStereoType( DisplaySurfaceStereoTypeNV stereoType_ ) VULKAN_HPP_NOEXCEPT
    {
      stereoType = stereoType_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native struct, by reference and by pointer.
    operator VkDisplaySurfaceStereoCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplaySurfaceStereoCreateInfoNV *>( this );
    }

    operator VkDisplaySurfaceStereoCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplaySurfaceStereoCreateInfoNV *>( this );
    }

    operator VkDisplaySurfaceStereoCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDisplaySurfaceStereoCreateInfoNV *>( this );
    }

    operator VkDisplaySurfaceStereoCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDisplaySurfaceStereoCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic code.
    std::tuple<StructureType const &, const void * const &, DisplaySurfaceStereoTypeNV const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stereoType );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplaySurfaceStereoCreateInfoNV const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( DisplaySurfaceStereoCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stereoType == rhs.stereoType );
#  endif
    }

    bool operator!=( DisplaySurfaceStereoCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType              sType      = StructureType::eDisplaySurfaceStereoCreateInfoNV;
    const void *               pNext      = {};
    DisplaySurfaceStereoTypeNV stereoType = DisplaySurfaceStereoTypeNV::eNone;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native struct to its C++ wrapper type (specialization only available from C++20 on).
  template <>
  struct CppType<VkDisplaySurfaceStereoCreateInfoNV>
  {
    using Type = DisplaySurfaceStereoCreateInfoNV;
  };
#endif

  // Maps StructureType::eDisplaySurfaceStereoCreateInfoNV back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eDisplaySurfaceStereoCreateInfoNV>
  {
    using Type = DisplaySurfaceStereoCreateInfoNV;
  };

  // wrapper struct for struct VkDrawIndexedIndirectCommand, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrawIndexedIndirectCommand.html
  // Plain parameter block (no sType/pNext); conversions below rely on it sharing the native struct's layout.
  struct DrawIndexedIndirectCommand
  {
    using NativeType = VkDrawIndexedIndirectCommand;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; every field defaults to zero.
    VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( uint32_t indexCount_    = {},
                                                     uint32_t instanceCount_ = {},
                                                     uint32_t firstIndex_    = {},
                                                     int32_t  vertexOffset_  = {},
                                                     uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT
      : indexCount{ indexCount_ }
      , instanceCount{ instanceCount_ }
      , firstIndex{ firstIndex_ }
      , vertexOffset{ vertexOffset_ }
      , firstInstance{ firstInstance_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native struct.
    DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
      : DrawIndexedIndirectCommand( *reinterpret_cast<DrawIndexedIndirectCommand const *>( &rhs ) )
    {
    }

    DrawIndexedIndirectCommand & operator=( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native struct.
    DrawIndexedIndirectCommand & operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DrawIndexedIndirectCommand const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per member; each returns *this for fluent use.
    VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      indexCount = indexCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceCount = instanceCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      firstIndex = firstIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexOffset = vertexOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT
    {
      firstInstance = firstInstance_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native struct, by reference and by pointer.
    operator VkDrawIndexedIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDrawIndexedIndirectCommand *>( this );
    }

    operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDrawIndexedIndirectCommand *>( this );
    }

    operator VkDrawIndexedIndirectCommand const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDrawIndexedIndirectCommand *>( this );
    }

    operator VkDrawIndexedIndirectCommand *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDrawIndexedIndirectCommand *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes the members as a tuple of references for generic code.
    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, int32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DrawIndexedIndirectCommand const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( indexCount == rhs.indexCount ) && ( instanceCount == rhs.instanceCount ) && ( firstIndex == rhs.firstIndex ) &&
             ( vertexOffset == rhs.vertexOffset ) && ( firstInstance == rhs.firstInstance );
#  endif
    }

    bool operator!=( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t indexCount    = {};
    uint32_t instanceCount = {};
    uint32_t firstIndex    = {};
    int32_t  vertexOffset  = {};
    uint32_t firstInstance = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native struct to its C++ wrapper type (specialization only available from C++20 on).
  template <>
  struct CppType<VkDrawIndexedIndirectCommand>
  {
    using Type = DrawIndexedIndirectCommand;
  };
#endif

  // wrapper struct for struct VkDrawIndirectCommand, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrawIndirectCommand.html
  // Plain parameter block (no sType/pNext); conversions below rely on it sharing the native struct's layout.
  struct DrawIndirectCommand
  {
    using NativeType = VkDrawIndirectCommand;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; every field defaults to zero.
    VULKAN_HPP_CONSTEXPR DrawIndirectCommand( uint32_t vertexCount_   = {},
                                              uint32_t instanceCount_ = {},
                                              uint32_t firstVertex_   = {},
                                              uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT
      : vertexCount( vertexCount_ )
      , instanceCount( instanceCount_ )
      , firstVertex( firstVertex_ )
      , firstInstance( firstInstance_ )
    {
    }

    VULKAN_HPP_CONSTEXPR DrawIndirectCommand( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct, which shares this type's layout.
    DrawIndirectCommand( VkDrawIndirectCommand const & native ) VULKAN_HPP_NOEXCEPT
      : DrawIndirectCommand( *reinterpret_cast<DrawIndirectCommand const *>( &native ) )
    {
    }

    DrawIndirectCommand & operator=( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct.
    DrawIndirectCommand & operator=( VkDrawIndirectCommand const & native ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DrawIndirectCommand const *>( &native );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per member; each returns *this for fluent use.
    VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      this->vertexCount = vertexCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
    {
      this->instanceCount = instanceCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
    {
      this->firstVertex = firstVertex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT
    {
      this->firstInstance = firstInstance_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native struct: const variants first, by reference and by pointer.
    operator VkDrawIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDrawIndirectCommand *>( this );
    }

    operator VkDrawIndirectCommand const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDrawIndirectCommand *>( this );
    }

    operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDrawIndirectCommand *>( this );
    }

    operator VkDrawIndirectCommand *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDrawIndirectCommand *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes the members as a tuple of references for generic code.
    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( vertexCount, instanceCount, firstVertex, firstInstance );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DrawIndirectCommand const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( DrawIndirectCommand const & other ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == other.reflect();
#  else
      bool equal = ( vertexCount == other.vertexCount );
      equal      = equal && ( instanceCount == other.instanceCount );
      equal      = equal && ( firstVertex == other.firstVertex );
      equal      = equal && ( firstInstance == other.firstInstance );
      return equal;
#  endif
    }

    bool operator!=( DrawIndirectCommand const & other ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == other );
    }
#endif

  public:
    uint32_t vertexCount   = {};
    uint32_t instanceCount = {};
    uint32_t firstVertex   = {};
    uint32_t firstInstance = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native struct to its C++ wrapper type (specialization only available from C++20 on).
  template <>
  struct CppType<VkDrawIndirectCommand>
  {
    using Type = DrawIndirectCommand;
  };
#endif

  // wrapper struct for struct VkDrawIndirectCountIndirectCommandEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrawIndirectCountIndirectCommandEXT.html
  struct DrawIndirectCountIndirectCommandEXT
  {
    // The underlying C API type. The wrapper carries exactly the same members in the
    // same order, which is what the pointer/reference reinterpret_casts below rely on.
    using NativeType = VkDrawIndirectCountIndirectCommandEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every member defaults to zero-initialized.
    VULKAN_HPP_CONSTEXPR
      DrawIndirectCountIndirectCommandEXT( DeviceAddress bufferAddress_ = {}, uint32_t stride_ = {}, uint32_t commandCount_ = {} ) VULKAN_HPP_NOEXCEPT
      : bufferAddress{ bufferAddress_ }
      , stride{ stride_ }
      , commandCount{ commandCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DrawIndirectCountIndirectCommandEXT( DrawIndirectCountIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by delegating to the copy constructor through a cast.
    DrawIndirectCountIndirectCommandEXT( VkDrawIndirectCountIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DrawIndirectCountIndirectCommandEXT( *reinterpret_cast<DrawIndirectCountIndirectCommandEXT const *>( &rhs ) )
    {
    }

    DrawIndirectCountIndirectCommandEXT & operator=( DrawIndirectCountIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    DrawIndirectCountIndirectCommandEXT & operator=( VkDrawIndirectCountIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DrawIndirectCountIndirectCommandEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 DrawIndirectCountIndirectCommandEXT & setBufferAddress( DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferAddress = bufferAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DrawIndirectCountIndirectCommandEXT & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
    {
      stride = stride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DrawIndirectCountIndirectCommandEXT & setCommandCount( uint32_t commandCount_ ) VULKAN_HPP_NOEXCEPT
    {
      commandCount = commandCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type, so this wrapper can be handed directly to C API calls.
    operator VkDrawIndirectCountIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDrawIndirectCountIndirectCommandEXT *>( this );
    }

    operator VkDrawIndirectCountIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDrawIndirectCountIndirectCommandEXT *>( this );
    }

    operator VkDrawIndirectCountIndirectCommandEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDrawIndirectCountIndirectCommandEXT *>( this );
    }

    operator VkDrawIndirectCountIndirectCommandEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDrawIndirectCountIndirectCommandEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Bundles all members as a tuple of const references; used by the reflection-based operator== below.
    std::tuple<DeviceAddress const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( bufferAddress, stride, commandCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; otherwise hand-written member-wise equality.
    auto operator<=>( DrawIndirectCountIndirectCommandEXT const & ) const = default;
#else
    bool operator==( DrawIndirectCountIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( bufferAddress == rhs.bufferAddress ) && ( stride == rhs.stride ) && ( commandCount == rhs.commandCount );
#  endif
    }

    bool operator!=( DrawIndirectCountIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    DeviceAddress bufferAddress = {};
    uint32_t      stride        = {};
    uint32_t      commandCount  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (only provided for C++20 and newer).
  template <>
  struct CppType<VkDrawIndirectCountIndirectCommandEXT>
  {
    using Type = DrawIndirectCountIndirectCommandEXT;
  };
#endif

  // wrapper struct for struct VkDrawMeshTasksIndirectCommandEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrawMeshTasksIndirectCommandEXT.html
  struct DrawMeshTasksIndirectCommandEXT
  {
    // The underlying C API type. The wrapper carries exactly the same members in the
    // same order, which is what the pointer/reference reinterpret_casts below rely on.
    using NativeType = VkDrawMeshTasksIndirectCommandEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every member defaults to zero-initialized.
    VULKAN_HPP_CONSTEXPR
      DrawMeshTasksIndirectCommandEXT( uint32_t groupCountX_ = {}, uint32_t groupCountY_ = {}, uint32_t groupCountZ_ = {} ) VULKAN_HPP_NOEXCEPT
      : groupCountX{ groupCountX_ }
      , groupCountY{ groupCountY_ }
      , groupCountZ{ groupCountZ_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandEXT( DrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by delegating to the copy constructor through a cast.
    DrawMeshTasksIndirectCommandEXT( VkDrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DrawMeshTasksIndirectCommandEXT( *reinterpret_cast<DrawMeshTasksIndirectCommandEXT const *>( &rhs ) )
    {
    }

    DrawMeshTasksIndirectCommandEXT & operator=( DrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    DrawMeshTasksIndirectCommandEXT & operator=( VkDrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DrawMeshTasksIndirectCommandEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountX( uint32_t groupCountX_ ) VULKAN_HPP_NOEXCEPT
    {
      groupCountX = groupCountX_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountY( uint32_t groupCountY_ ) VULKAN_HPP_NOEXCEPT
    {
      groupCountY = groupCountY_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountZ( uint32_t groupCountZ_ ) VULKAN_HPP_NOEXCEPT
    {
      groupCountZ = groupCountZ_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type, so this wrapper can be handed directly to C API calls.
    operator VkDrawMeshTasksIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDrawMeshTasksIndirectCommandEXT *>( this );
    }

    operator VkDrawMeshTasksIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDrawMeshTasksIndirectCommandEXT *>( this );
    }

    operator VkDrawMeshTasksIndirectCommandEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDrawMeshTasksIndirectCommandEXT *>( this );
    }

    operator VkDrawMeshTasksIndirectCommandEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDrawMeshTasksIndirectCommandEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Bundles all members as a tuple of const references; used by the reflection-based operator== below.
    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( groupCountX, groupCountY, groupCountZ );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; otherwise hand-written member-wise equality.
    auto operator<=>( DrawMeshTasksIndirectCommandEXT const & ) const = default;
#else
    bool operator==( DrawMeshTasksIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( groupCountX == rhs.groupCountX ) && ( groupCountY == rhs.groupCountY ) && ( groupCountZ == rhs.groupCountZ );
#  endif
    }

    bool operator!=( DrawMeshTasksIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t groupCountX = {};
    uint32_t groupCountY = {};
    uint32_t groupCountZ = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (only provided for C++20 and newer).
  template <>
  struct CppType<VkDrawMeshTasksIndirectCommandEXT>
  {
    using Type = DrawMeshTasksIndirectCommandEXT;
  };
#endif

  // wrapper struct for struct VkDrawMeshTasksIndirectCommandNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrawMeshTasksIndirectCommandNV.html
  struct DrawMeshTasksIndirectCommandNV
  {
    // The underlying C API type. The wrapper carries exactly the same members in the
    // same order, which is what the pointer/reference reinterpret_casts below rely on.
    using NativeType = VkDrawMeshTasksIndirectCommandNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every member defaults to zero-initialized.
    VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( uint32_t taskCount_ = {}, uint32_t firstTask_ = {} ) VULKAN_HPP_NOEXCEPT
      : taskCount{ taskCount_ }
      , firstTask{ firstTask_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by delegating to the copy constructor through a cast.
    DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : DrawMeshTasksIndirectCommandNV( *reinterpret_cast<DrawMeshTasksIndirectCommandNV const *>( &rhs ) )
    {
    }

    DrawMeshTasksIndirectCommandNV & operator=( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    DrawMeshTasksIndirectCommandNV & operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DrawMeshTasksIndirectCommandNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV & setTaskCount( uint32_t taskCount_ ) VULKAN_HPP_NOEXCEPT
    {
      taskCount = taskCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV & setFirstTask( uint32_t firstTask_ ) VULKAN_HPP_NOEXCEPT
    {
      firstTask = firstTask_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type, so this wrapper can be handed directly to C API calls.
    operator VkDrawMeshTasksIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDrawMeshTasksIndirectCommandNV *>( this );
    }

    operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDrawMeshTasksIndirectCommandNV *>( this );
    }

    operator VkDrawMeshTasksIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDrawMeshTasksIndirectCommandNV *>( this );
    }

    operator VkDrawMeshTasksIndirectCommandNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDrawMeshTasksIndirectCommandNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Bundles all members as a tuple of const references; used by the reflection-based operator== below.
    std::tuple<uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( taskCount, firstTask );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; otherwise hand-written member-wise equality.
    auto operator<=>( DrawMeshTasksIndirectCommandNV const & ) const = default;
#else
    bool operator==( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( taskCount == rhs.taskCount ) && ( firstTask == rhs.firstTask );
#  endif
    }

    bool operator!=( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t taskCount = {};
    uint32_t firstTask = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (only provided for C++20 and newer).
  template <>
  struct CppType<VkDrawMeshTasksIndirectCommandNV>
  {
    using Type = DrawMeshTasksIndirectCommandNV;
  };
#endif

  // wrapper struct for struct VkDrmFormatModifierProperties2EXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrmFormatModifierProperties2EXT.html
  struct DrmFormatModifierProperties2EXT
  {
    // The underlying C API type. The wrapper carries exactly the same members in the
    // same order, which is what the pointer/reference reinterpret_casts below rely on.
    using NativeType = VkDrmFormatModifierProperties2EXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every member defaults to zero-initialized.
    VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT( uint64_t            drmFormatModifier_               = {},
                                                          uint32_t            drmFormatModifierPlaneCount_     = {},
                                                          FormatFeatureFlags2 drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
      : drmFormatModifier{ drmFormatModifier_ }
      , drmFormatModifierPlaneCount{ drmFormatModifierPlaneCount_ }
      , drmFormatModifierTilingFeatures{ drmFormatModifierTilingFeatures_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by delegating to the copy constructor through a cast.
    DrmFormatModifierProperties2EXT( VkDrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DrmFormatModifierProperties2EXT( *reinterpret_cast<DrmFormatModifierProperties2EXT const *>( &rhs ) )
    {
    }

    DrmFormatModifierProperties2EXT & operator=( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    // Note: no setter block is generated for this struct (presumably because it is a
    // returned-only structure filled in by the implementation — see the Vulkan registry).
    DrmFormatModifierProperties2EXT & operator=( VkDrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DrmFormatModifierProperties2EXT const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native type, so this wrapper can be handed directly to C API calls.
    operator VkDrmFormatModifierProperties2EXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDrmFormatModifierProperties2EXT *>( this );
    }

    operator VkDrmFormatModifierProperties2EXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDrmFormatModifierProperties2EXT *>( this );
    }

    operator VkDrmFormatModifierProperties2EXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDrmFormatModifierProperties2EXT *>( this );
    }

    operator VkDrmFormatModifierProperties2EXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDrmFormatModifierProperties2EXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Bundles all members as a tuple of const references; used by the reflection-based operator== below.
    std::tuple<uint64_t const &, uint32_t const &, FormatFeatureFlags2 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( drmFormatModifier, drmFormatModifierPlaneCount, drmFormatModifierTilingFeatures );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; otherwise hand-written member-wise equality.
    auto operator<=>( DrmFormatModifierProperties2EXT const & ) const = default;
#else
    bool operator==( DrmFormatModifierProperties2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( drmFormatModifier == rhs.drmFormatModifier ) && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) &&
             ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures );
#  endif
    }

    bool operator!=( DrmFormatModifierProperties2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint64_t            drmFormatModifier               = {};
    uint32_t            drmFormatModifierPlaneCount     = {};
    FormatFeatureFlags2 drmFormatModifierTilingFeatures = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (only provided for C++20 and newer).
  template <>
  struct CppType<VkDrmFormatModifierProperties2EXT>
  {
    using Type = DrmFormatModifierProperties2EXT;
  };
#endif

  // wrapper struct for struct VkDrmFormatModifierPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrmFormatModifierPropertiesEXT.html
  struct DrmFormatModifierPropertiesEXT
  {
    // The underlying C API type. The wrapper carries exactly the same members in the
    // same order, which is what the pointer/reference reinterpret_casts below rely on.
    using NativeType = VkDrmFormatModifierPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every member defaults to zero-initialized.
    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( uint64_t           drmFormatModifier_               = {},
                                                         uint32_t           drmFormatModifierPlaneCount_     = {},
                                                         FormatFeatureFlags drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
      : drmFormatModifier{ drmFormatModifier_ }
      , drmFormatModifierPlaneCount{ drmFormatModifierPlaneCount_ }
      , drmFormatModifierTilingFeatures{ drmFormatModifierTilingFeatures_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by delegating to the copy constructor through a cast.
    DrmFormatModifierPropertiesEXT( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DrmFormatModifierPropertiesEXT( *reinterpret_cast<DrmFormatModifierPropertiesEXT const *>( &rhs ) )
    {
    }

    DrmFormatModifierPropertiesEXT & operator=( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    // Note: no setter block is generated for this struct (presumably because it is a
    // returned-only structure filled in by the implementation — see the Vulkan registry).
    DrmFormatModifierPropertiesEXT & operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DrmFormatModifierPropertiesEXT const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native type, so this wrapper can be handed directly to C API calls.
    operator VkDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDrmFormatModifierPropertiesEXT *>( this );
    }

    operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDrmFormatModifierPropertiesEXT *>( this );
    }

    operator VkDrmFormatModifierPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDrmFormatModifierPropertiesEXT *>( this );
    }

    operator VkDrmFormatModifierPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDrmFormatModifierPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Bundles all members as a tuple of const references; used by the reflection-based operator== below.
    std::tuple<uint64_t const &, uint32_t const &, FormatFeatureFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( drmFormatModifier, drmFormatModifierPlaneCount, drmFormatModifierTilingFeatures );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; otherwise hand-written member-wise equality.
    auto operator<=>( DrmFormatModifierPropertiesEXT const & ) const = default;
#else
    bool operator==( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( drmFormatModifier == rhs.drmFormatModifier ) && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) &&
             ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures );
#  endif
    }

    bool operator!=( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint64_t           drmFormatModifier               = {};
    uint32_t           drmFormatModifierPlaneCount     = {};
    FormatFeatureFlags drmFormatModifierTilingFeatures = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (only provided for C++20 and newer).
  template <>
  struct CppType<VkDrmFormatModifierPropertiesEXT>
  {
    using Type = DrmFormatModifierPropertiesEXT;
  };
#endif

  // wrapper struct for struct VkDrmFormatModifierPropertiesList2EXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrmFormatModifierPropertiesList2EXT.html
  struct DrmFormatModifierPropertiesList2EXT
  {
    // The underlying C API type. The wrapper carries exactly the same members in the
    // same order, which is what the pointer/reference reinterpret_casts below rely on.
    using NativeType = VkDrmFormatModifierPropertiesList2EXT;

    // Compile-time metadata consumed by the StructureChain machinery elsewhere in vulkan.hpp:
    // the sType tag for this struct, and whether it may appear more than once in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDrmFormatModifierPropertiesList2EXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; members default to zero-initialized, pNext to nullptr.
    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT( uint32_t                          drmFormatModifierCount_       = {},
                                                              DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties_ = {},
                                                              void *                            pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , drmFormatModifierCount{ drmFormatModifierCount_ }
      , pDrmFormatModifierProperties{ pDrmFormatModifierProperties_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT( DrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by delegating to the copy constructor through a cast.
    DrmFormatModifierPropertiesList2EXT( VkDrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DrmFormatModifierPropertiesList2EXT( *reinterpret_cast<DrmFormatModifierPropertiesList2EXT const *>( &rhs ) )
    {
    }

    DrmFormatModifierPropertiesList2EXT & operator=( DrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    // Note: no setter block is generated for this struct (presumably because it is a
    // returned-only structure filled in by the implementation — see the Vulkan registry).
    DrmFormatModifierPropertiesList2EXT & operator=( VkDrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DrmFormatModifierPropertiesList2EXT const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native type, so this wrapper can be handed directly to C API calls.
    operator VkDrmFormatModifierPropertiesList2EXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDrmFormatModifierPropertiesList2EXT *>( this );
    }

    operator VkDrmFormatModifierPropertiesList2EXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDrmFormatModifierPropertiesList2EXT *>( this );
    }

    operator VkDrmFormatModifierPropertiesList2EXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDrmFormatModifierPropertiesList2EXT *>( this );
    }

    operator VkDrmFormatModifierPropertiesList2EXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDrmFormatModifierPropertiesList2EXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Bundles all members as a tuple of const references; used by the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, uint32_t const &, DrmFormatModifierProperties2EXT * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifierProperties );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; otherwise hand-written member-wise equality.
    // Pointer members are compared by address, not by pointee contents.
    auto operator<=>( DrmFormatModifierPropertiesList2EXT const & ) const = default;
#else
    bool operator==( DrmFormatModifierPropertiesList2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) &&
             ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties );
#  endif
    }

    bool operator!=( DrmFormatModifierPropertiesList2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType defaults to the tag the C API expects for this structure; callers should not change it.
    StructureType                     sType                        = StructureType::eDrmFormatModifierPropertiesList2EXT;
    void *                            pNext                        = {};
    uint32_t                          drmFormatModifierCount       = {};
    DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (only provided for C++20 and newer).
  template <>
  struct CppType<VkDrmFormatModifierPropertiesList2EXT>
  {
    using Type = DrmFormatModifierPropertiesList2EXT;
  };
#endif

  // Trait mapping the StructureType enum value back to the wrapper type that carries it.
  template <>
  struct CppType<StructureType, StructureType::eDrmFormatModifierPropertiesList2EXT>
  {
    using Type = DrmFormatModifierPropertiesList2EXT;
  };

  // wrapper struct for struct VkDrmFormatModifierPropertiesListEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrmFormatModifierPropertiesListEXT.html
  struct DrmFormatModifierPropertiesListEXT
  {
    // The underlying C API type. The wrapper carries exactly the same members in the
    // same order, which is what the pointer/reference reinterpret_casts below rely on.
    using NativeType = VkDrmFormatModifierPropertiesListEXT;

    // Compile-time metadata consumed by the StructureChain machinery elsewhere in vulkan.hpp:
    // the sType tag for this struct, and whether it may appear more than once in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eDrmFormatModifierPropertiesListEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; members default to zero-initialized, pNext to nullptr.
    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( uint32_t                         drmFormatModifierCount_       = {},
                                                             DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties_ = {},
                                                             void *                           pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , drmFormatModifierCount{ drmFormatModifierCount_ }
      , pDrmFormatModifierProperties{ pDrmFormatModifierProperties_ }
    {
    }

    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by delegating to the copy constructor through a cast.
    DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DrmFormatModifierPropertiesListEXT( *reinterpret_cast<DrmFormatModifierPropertiesListEXT const *>( &rhs ) )
    {
    }

    DrmFormatModifierPropertiesListEXT & operator=( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    // Note: no setter block is generated for this struct (presumably because it is a
    // returned-only structure filled in by the implementation — see the Vulkan registry).
    DrmFormatModifierPropertiesListEXT & operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<DrmFormatModifierPropertiesListEXT const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native type, so this wrapper can be handed directly to C API calls.
    operator VkDrmFormatModifierPropertiesListEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDrmFormatModifierPropertiesListEXT *>( this );
    }

    operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDrmFormatModifierPropertiesListEXT *>( this );
    }

    operator VkDrmFormatModifierPropertiesListEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkDrmFormatModifierPropertiesListEXT *>( this );
    }

    operator VkDrmFormatModifierPropertiesListEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkDrmFormatModifierPropertiesListEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Bundles all members as a tuple of const references; used by the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, uint32_t const &, DrmFormatModifierPropertiesEXT * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifierProperties );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; otherwise hand-written member-wise equality.
    // Pointer members are compared by address, not by pointee contents.
    auto operator<=>( DrmFormatModifierPropertiesListEXT const & ) const = default;
#else
    bool operator==( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) &&
             ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties );
#  endif
    }

    bool operator!=( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType defaults to the tag the C API expects for this structure; callers should not change it.
    StructureType                    sType                        = StructureType::eDrmFormatModifierPropertiesListEXT;
    void *                           pNext                        = {};
    uint32_t                         drmFormatModifierCount       = {};
    DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (only provided for C++20 and newer).
  template <>
  struct CppType<VkDrmFormatModifierPropertiesListEXT>
  {
    using Type = DrmFormatModifierPropertiesListEXT;
  };
#endif

  // Trait mapping the StructureType enum value back to the wrapper type that carries it.
  template <>
  struct CppType<StructureType, StructureType::eDrmFormatModifierPropertiesListEXT>
  {
    using Type = DrmFormatModifierPropertiesListEXT;
  };

  // wrapper struct for struct VkEventCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkEventCreateInfo.html
  struct EventCreateInfo
  {
    // The underlying C API type. The wrapper carries exactly the same members in the
    // same order, which is what the pointer/reference reinterpret_casts below rely on.
    using NativeType = VkEventCreateInfo;

    // Compile-time metadata consumed by the StructureChain machinery elsewhere in vulkan.hpp:
    // the sType tag for this struct, and whether it may appear more than once in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eEventCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; flags default to zero, pNext to nullptr.
    VULKAN_HPP_CONSTEXPR EventCreateInfo( EventCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR EventCreateInfo( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by delegating to the copy constructor through a cast.
    EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : EventCreateInfo( *reinterpret_cast<EventCreateInfo const *>( &rhs ) ) {}

    EventCreateInfo & operator=( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    EventCreateInfo & operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<EventCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & setFlags( EventCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type, so this wrapper can be handed directly to C API calls.
    operator VkEventCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkEventCreateInfo *>( this );
    }

    operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkEventCreateInfo *>( this );
    }

    operator VkEventCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkEventCreateInfo *>( this );
    }

    operator VkEventCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkEventCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Bundles all members as a tuple of const references; used by the reflection-based operator== below.
    std::tuple<StructureType const &, const void * const &, EventCreateFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; otherwise hand-written member-wise equality.
    // pNext is compared by address, not by pointee contents.
    auto operator<=>( EventCreateInfo const & ) const = default;
#else
    bool operator==( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
#  endif
    }

    bool operator!=( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType defaults to the tag the C API expects for this structure; callers should not change it.
    StructureType    sType = StructureType::eEventCreateInfo;
    const void *     pNext = {};
    EventCreateFlags flags = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (only provided for C++20 and newer).
  template <>
  struct CppType<VkEventCreateInfo>
  {
    using Type = EventCreateInfo;
  };
#endif

  // Trait mapping the StructureType enum value back to the wrapper type that carries it.
  template <>
  struct CppType<StructureType, StructureType::eEventCreateInfo>
  {
    using Type = EventCreateInfo;
  };

  // wrapper struct for struct VkPipelineLibraryCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineLibraryCreateInfoKHR.html
  struct PipelineLibraryCreateInfoKHR
  {
    using NativeType = VkPipelineLibraryCreateInfoKHR;

    // sType value this struct always carries; allowDuplicate == false: at most one instance of this
    // struct type is expected in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineLibraryCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all members default to zero-initialized / nullptr.
    VULKAN_HPP_CONSTEXPR
      PipelineLibraryCreateInfoKHR( uint32_t libraryCount_ = {}, const Pipeline * pLibraries_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , libraryCount{ libraryCount_ }
      , pLibraries{ pLibraries_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct: valid because this wrapper mirrors the member layout of
    // VkPipelineLibraryCreateInfoKHR (see the reinterpret_cast conversion operators below).
    PipelineLibraryCreateInfoKHR( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineLibraryCreateInfoKHR( *reinterpret_cast<PipelineLibraryCreateInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives libraryCount and pLibraries from a single array proxy.
    PipelineLibraryCreateInfoKHR( ArrayProxyNoTemporaries<const Pipeline> const & libraries_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), libraryCount( static_cast<uint32_t>( libraries_.size() ) ), pLibraries( libraries_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineLibraryCreateInfoKHR & operator=( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, via the same layout-compatible reinterpretation.
    PipelineLibraryCreateInfoKHR & operator=( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineLibraryCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setLibraryCount( uint32_t libraryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      libraryCount = libraryCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPLibraries( const Pipeline * pLibraries_ ) VULKAN_HPP_NOEXCEPT
    {
      pLibraries = pLibraries_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets libraryCount and pLibraries together from one array proxy.
    PipelineLibraryCreateInfoKHR & setLibraries( ArrayProxyNoTemporaries<const Pipeline> const & libraries_ ) VULKAN_HPP_NOEXCEPT
    {
      libraryCount = static_cast<uint32_t>( libraries_.size() );
      pLibraries   = libraries_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type, so this wrapper can be passed straight to the C API.
    operator VkPipelineLibraryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineLibraryCreateInfoKHR *>( this );
    }

    operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineLibraryCreateInfoKHR *>( this );
    }

    operator VkPipelineLibraryCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineLibraryCreateInfoKHR *>( this );
    }

    operator VkPipelineLibraryCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineLibraryCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references, used e.g. by the reflection-based operator==.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const Pipeline * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, libraryCount, pLibraries );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineLibraryCreateInfoKHR const & ) const = default;
#else
    // Member-wise equality; note that pointer members are compared by address, not by pointee.
    bool operator==( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( libraryCount == rhs.libraryCount ) && ( pLibraries == rhs.pLibraries );
#  endif
    }

    bool operator!=( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType    sType        = StructureType::ePipelineLibraryCreateInfoKHR;
    const void *     pNext        = {};
    uint32_t         libraryCount = {};
    const Pipeline * pLibraries   = {};
  };

  // Map the C type and its StructureType value back to this wrapper for template meta-programming.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPipelineLibraryCreateInfoKHR>
  {
    using Type = PipelineLibraryCreateInfoKHR;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePipelineLibraryCreateInfoKHR>
  {
    using Type = PipelineLibraryCreateInfoKHR;
  };

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  // wrapper struct for struct VkExecutionGraphPipelineCreateInfoAMDX, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExecutionGraphPipelineCreateInfoAMDX.html
  struct ExecutionGraphPipelineCreateInfoAMDX
  {
    using NativeType = VkExecutionGraphPipelineCreateInfoAMDX;

    // sType value this struct always carries; allowDuplicate == false: at most one instance of this
    // struct type is expected in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExecutionGraphPipelineCreateInfoAMDX;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all members default to zero-initialized / null handles.
    VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineCreateInfoAMDX( PipelineCreateFlags                   flags_              = {},
                                                               uint32_t                              stageCount_         = {},
                                                               const PipelineShaderStageCreateInfo * pStages_            = {},
                                                               const PipelineLibraryCreateInfoKHR *  pLibraryInfo_       = {},
                                                               PipelineLayout                        layout_             = {},
                                                               Pipeline                              basePipelineHandle_ = {},
                                                               int32_t                               basePipelineIndex_  = {},
                                                               const void *                          pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , stageCount{ stageCount_ }
      , pStages{ pStages_ }
      , pLibraryInfo{ pLibraryInfo_ }
      , layout{ layout_ }
      , basePipelineHandle{ basePipelineHandle_ }
      , basePipelineIndex{ basePipelineIndex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineCreateInfoAMDX( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct: valid because this wrapper mirrors the member layout of
    // VkExecutionGraphPipelineCreateInfoAMDX (see the reinterpret_cast conversion operators below).
    ExecutionGraphPipelineCreateInfoAMDX( VkExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExecutionGraphPipelineCreateInfoAMDX( *reinterpret_cast<ExecutionGraphPipelineCreateInfoAMDX const *>( &rhs ) )
    {
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives stageCount and pStages from a single array proxy.
    ExecutionGraphPipelineCreateInfoAMDX( PipelineCreateFlags                                                  flags_,
                                          ArrayProxyNoTemporaries<const PipelineShaderStageCreateInfo> const & stages_,
                                          const PipelineLibraryCreateInfoKHR *                                 pLibraryInfo_       = {},
                                          PipelineLayout                                                       layout_             = {},
                                          Pipeline                                                             basePipelineHandle_ = {},
                                          int32_t                                                              basePipelineIndex_  = {},
                                          const void *                                                         pNext_              = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , stageCount( static_cast<uint32_t>( stages_.size() ) )
      , pStages( stages_.data() )
      , pLibraryInfo( pLibraryInfo_ )
      , layout( layout_ )
      , basePipelineHandle( basePipelineHandle_ )
      , basePipelineIndex( basePipelineIndex_ )
    {
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    ExecutionGraphPipelineCreateInfoAMDX & operator=( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, via the same layout-compatible reinterpretation.
    ExecutionGraphPipelineCreateInfoAMDX & operator=( VkExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExecutionGraphPipelineCreateInfoAMDX const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setFlags( PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
    {
      stageCount = stageCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setPStages( const PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
    {
      pStages = pStages_;
      return *this;
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets stageCount and pStages together from one array proxy.
    ExecutionGraphPipelineCreateInfoAMDX & setStages( ArrayProxyNoTemporaries<const PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
    {
      stageCount = static_cast<uint32_t>( stages_.size() );
      pStages    = stages_.data();
      return *this;
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setPLibraryInfo( const PipelineLibraryCreateInfoKHR * pLibraryInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pLibraryInfo = pLibraryInfo_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setLayout( PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
    {
      layout = layout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setBasePipelineHandle( Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
    {
      basePipelineHandle = basePipelineHandle_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      basePipelineIndex = basePipelineIndex_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type, so this wrapper can be passed straight to the C API.
    operator VkExecutionGraphPipelineCreateInfoAMDX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( this );
    }

    operator VkExecutionGraphPipelineCreateInfoAMDX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExecutionGraphPipelineCreateInfoAMDX *>( this );
    }

    operator VkExecutionGraphPipelineCreateInfoAMDX const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( this );
    }

    operator VkExecutionGraphPipelineCreateInfoAMDX *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExecutionGraphPipelineCreateInfoAMDX *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references, used e.g. by the reflection-based operator==.
    std::tuple<StructureType const &,
               const void * const &,
               PipelineCreateFlags const &,
               uint32_t const &,
               const PipelineShaderStageCreateInfo * const &,
               const PipelineLibraryCreateInfoKHR * const &,
               PipelineLayout const &,
               Pipeline const &,
               int32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, stageCount, pStages, pLibraryInfo, layout, basePipelineHandle, basePipelineIndex );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExecutionGraphPipelineCreateInfoAMDX const & ) const = default;
#  else
    // Member-wise equality; note that pointer members are compared by address, not by pointee.
    bool operator==( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) &&
             ( pLibraryInfo == rhs.pLibraryInfo ) && ( layout == rhs.layout ) && ( basePipelineHandle == rhs.basePipelineHandle ) &&
             ( basePipelineIndex == rhs.basePipelineIndex );
#    endif
    }

    bool operator!=( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    StructureType                         sType              = StructureType::eExecutionGraphPipelineCreateInfoAMDX;
    const void *                          pNext              = {};
    PipelineCreateFlags                   flags              = {};
    uint32_t                              stageCount         = {};
    const PipelineShaderStageCreateInfo * pStages            = {};
    const PipelineLibraryCreateInfoKHR *  pLibraryInfo       = {};
    PipelineLayout                        layout             = {};
    Pipeline                              basePipelineHandle = {};
    int32_t                               basePipelineIndex  = {};
  };

  // Map the C type and its StructureType value back to this wrapper for template meta-programming.
#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkExecutionGraphPipelineCreateInfoAMDX>
  {
    using Type = ExecutionGraphPipelineCreateInfoAMDX;
  };
#  endif

  template <>
  struct CppType<StructureType, StructureType::eExecutionGraphPipelineCreateInfoAMDX>
  {
    using Type = ExecutionGraphPipelineCreateInfoAMDX;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  // wrapper struct for struct VkExecutionGraphPipelineScratchSizeAMDX, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExecutionGraphPipelineScratchSizeAMDX.html
  struct ExecutionGraphPipelineScratchSizeAMDX
  {
    using NativeType = VkExecutionGraphPipelineScratchSizeAMDX;

    // Fixed sType for this struct; it may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExecutionGraphPipelineScratchSizeAMDX;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; every member is individually specifiable and defaulted.
    VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineScratchSizeAMDX( DeviceSize minSize_         = {},
                                                                DeviceSize maxSize_         = {},
                                                                DeviceSize sizeGranularity_ = {},
                                                                void *     pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , minSize( minSize_ )
      , maxSize( maxSize_ )
      , sizeGranularity( sizeGranularity_ )
    {
    }

    VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineScratchSizeAMDX( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Copy-construct from the C struct by reinterpreting it as this layout-compatible wrapper.
    ExecutionGraphPipelineScratchSizeAMDX( VkExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExecutionGraphPipelineScratchSizeAMDX( *reinterpret_cast<ExecutionGraphPipelineScratchSizeAMDX const *>( &rhs ) )
    {
    }

    ExecutionGraphPipelineScratchSizeAMDX & operator=( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct, again through the layout-compatible reinterpretation.
    ExecutionGraphPipelineScratchSizeAMDX & operator=( VkExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExecutionGraphPipelineScratchSizeAMDX const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters, one per member, each returning *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setMinSize( DeviceSize minSize_ ) VULKAN_HPP_NOEXCEPT
    {
      this->minSize = minSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setMaxSize( DeviceSize maxSize_ ) VULKAN_HPP_NOEXCEPT
    {
      this->maxSize = maxSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setSizeGranularity( DeviceSize sizeGranularity_ ) VULKAN_HPP_NOEXCEPT
    {
      this->sizeGranularity = sizeGranularity_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type, so the wrapper can be handed directly to the C API.
    operator VkExecutionGraphPipelineScratchSizeAMDX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExecutionGraphPipelineScratchSizeAMDX *>( this );
    }

    operator VkExecutionGraphPipelineScratchSizeAMDX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( this );
    }

    operator VkExecutionGraphPipelineScratchSizeAMDX const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExecutionGraphPipelineScratchSizeAMDX *>( this );
    }

    operator VkExecutionGraphPipelineScratchSizeAMDX *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of references, for reflection-based comparison.
    std::tuple<StructureType const &, void * const &, DeviceSize const &, DeviceSize const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, minSize, maxSize, sizeGranularity );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExecutionGraphPipelineScratchSizeAMDX const & ) const = default;
#  else
    // Member-wise equality; pNext is compared by pointer value.
    bool operator==( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return sType == rhs.sType && pNext == rhs.pNext && minSize == rhs.minSize && maxSize == rhs.maxSize && sizeGranularity == rhs.sizeGranularity;
#    endif
    }

    bool operator!=( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#  endif

  public:
    StructureType sType           = StructureType::eExecutionGraphPipelineScratchSizeAMDX;
    void *        pNext           = {};
    DeviceSize    minSize         = {};
    DeviceSize    maxSize         = {};
    DeviceSize    sizeGranularity = {};
  };

  // Map the C type and its StructureType value back to the C++ wrapper.
#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkExecutionGraphPipelineScratchSizeAMDX>
  {
    using Type = ExecutionGraphPipelineScratchSizeAMDX;
  };
#  endif

  template <>
  struct CppType<StructureType, StructureType::eExecutionGraphPipelineScratchSizeAMDX>
  {
    using Type = ExecutionGraphPipelineScratchSizeAMDX;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  // wrapper struct for struct VkExportFenceCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportFenceCreateInfo.html
  struct ExportFenceCreateInfo
  {
    using NativeType = VkExportFenceCreateInfo;

    // Fixed sType for this struct; it may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExportFenceCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; every member is individually specifiable and defaulted.
    VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( ExternalFenceHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , handleTypes( handleTypes_ )
    {
    }

    VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Copy-construct from the C struct by reinterpreting it as this layout-compatible wrapper.
    ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExportFenceCreateInfo( *reinterpret_cast<ExportFenceCreateInfo const *>( &rhs ) )
    {
    }

    ExportFenceCreateInfo & operator=( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct, again through the layout-compatible reinterpretation.
    ExportFenceCreateInfo & operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExportFenceCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters, one per member, each returning *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo & setHandleTypes( ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      this->handleTypes = handleTypes_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type, so the wrapper can be handed directly to the C API.
    operator VkExportFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExportFenceCreateInfo *>( this );
    }

    operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExportFenceCreateInfo *>( this );
    }

    operator VkExportFenceCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExportFenceCreateInfo *>( this );
    }

    operator VkExportFenceCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExportFenceCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of references, for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, ExternalFenceHandleTypeFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, handleTypes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExportFenceCreateInfo const & ) const = default;
#else
    // Member-wise equality; pNext is compared by pointer value.
    bool operator==( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return sType == rhs.sType && pNext == rhs.pNext && handleTypes == rhs.handleTypes;
#  endif
    }

    bool operator!=( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType                sType       = StructureType::eExportFenceCreateInfo;
    const void *                 pNext       = {};
    ExternalFenceHandleTypeFlags handleTypes = {};
  };

  // Map the C type and its StructureType value back to the C++ wrapper.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkExportFenceCreateInfo>
  {
    using Type = ExportFenceCreateInfo;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eExportFenceCreateInfo>
  {
    using Type = ExportFenceCreateInfo;
  };

  using ExportFenceCreateInfoKHR = ExportFenceCreateInfo;

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  // wrapper struct for struct VkExportFenceWin32HandleInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportFenceWin32HandleInfoKHR.html
  // Uses the Win32 types SECURITY_ATTRIBUTES, DWORD and LPCWSTR, hence the platform guard.
  struct ExportFenceWin32HandleInfoKHR
  {
    using NativeType = VkExportFenceWin32HandleInfoKHR;

    // sType value this struct always carries; allowDuplicate == false: at most one instance of this
    // struct type is expected in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExportFenceWin32HandleInfoKHR;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all members default to zero-initialized / nullptr.
    VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {},
                                                        DWORD                       dwAccess_    = {},
                                                        LPCWSTR                     name_        = {},
                                                        const void *                pNext_       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pAttributes{ pAttributes_ }
      , dwAccess{ dwAccess_ }
      , name{ name_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct: valid because this wrapper mirrors the member layout of
    // VkExportFenceWin32HandleInfoKHR (see the reinterpret_cast conversion operators below).
    ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExportFenceWin32HandleInfoKHR( *reinterpret_cast<ExportFenceWin32HandleInfoKHR const *>( &rhs ) )
    {
    }

    ExportFenceWin32HandleInfoKHR & operator=( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, via the same layout-compatible reinterpretation.
    ExportFenceWin32HandleInfoKHR & operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExportFenceWin32HandleInfoKHR const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
    {
      pAttributes = pAttributes_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      dwAccess = dwAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
    {
      name = name_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type, so this wrapper can be passed straight to the C API.
    operator VkExportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExportFenceWin32HandleInfoKHR *>( this );
    }

    operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExportFenceWin32HandleInfoKHR *>( this );
    }

    operator VkExportFenceWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExportFenceWin32HandleInfoKHR *>( this );
    }

    operator VkExportFenceWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExportFenceWin32HandleInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references, used e.g. by the reflection-based operator==.
    std::tuple<StructureType const &, const void * const &, const SECURITY_ATTRIBUTES * const &, DWORD const &, LPCWSTR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pAttributes, dwAccess, name );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExportFenceWin32HandleInfoKHR const & ) const = default;
#  else
    // Member-wise equality; note that name (LPCWSTR) is compared by pointer, not by string content.
    bool operator==( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess ) && ( name == rhs.name );
#    endif
    }

    bool operator!=( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    StructureType               sType       = StructureType::eExportFenceWin32HandleInfoKHR;
    const void *                pNext       = {};
    const SECURITY_ATTRIBUTES * pAttributes = {};
    DWORD                       dwAccess    = {};
    LPCWSTR                     name        = {};
  };

  // Map the C type and its StructureType value back to this wrapper for template meta-programming.
#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkExportFenceWin32HandleInfoKHR>
  {
    using Type = ExportFenceWin32HandleInfoKHR;
  };
#  endif

  template <>
  struct CppType<StructureType, StructureType::eExportFenceWin32HandleInfoKHR>
  {
    using Type = ExportFenceWin32HandleInfoKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

  // wrapper struct for struct VkExportMemoryAllocateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMemoryAllocateInfo.html
  struct ExportMemoryAllocateInfo
  {
    using NativeType = VkExportMemoryAllocateInfo;

    // Fixed sType for this struct; it may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExportMemoryAllocateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; every member is individually specifiable and defaulted.
    VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( ExternalMemoryHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , handleTypes( handleTypes_ )
    {
    }

    VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Copy-construct from the C struct by reinterpreting it as this layout-compatible wrapper.
    ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExportMemoryAllocateInfo( *reinterpret_cast<ExportMemoryAllocateInfo const *>( &rhs ) )
    {
    }

    ExportMemoryAllocateInfo & operator=( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct, again through the layout-compatible reinterpretation.
    ExportMemoryAllocateInfo & operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExportMemoryAllocateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters, one per member, each returning *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo & setHandleTypes( ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      this->handleTypes = handleTypes_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type, so the wrapper can be handed directly to the C API.
    operator VkExportMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExportMemoryAllocateInfo *>( this );
    }

    operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExportMemoryAllocateInfo *>( this );
    }

    operator VkExportMemoryAllocateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExportMemoryAllocateInfo *>( this );
    }

    operator VkExportMemoryAllocateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExportMemoryAllocateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of references, for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, ExternalMemoryHandleTypeFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, handleTypes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExportMemoryAllocateInfo const & ) const = default;
#else
    // Member-wise equality; pNext is compared by pointer value.
    bool operator==( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return sType == rhs.sType && pNext == rhs.pNext && handleTypes == rhs.handleTypes;
#  endif
    }

    bool operator!=( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType                 sType       = StructureType::eExportMemoryAllocateInfo;
    const void *                  pNext       = {};
    ExternalMemoryHandleTypeFlags handleTypes = {};
  };

  // Map the C type and its StructureType value back to the C++ wrapper.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkExportMemoryAllocateInfo>
  {
    using Type = ExportMemoryAllocateInfo;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eExportMemoryAllocateInfo>
  {
    using Type = ExportMemoryAllocateInfo;
  };

  using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo;

  // wrapper struct for struct VkExportMemoryAllocateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMemoryAllocateInfoNV.html
  struct ExportMemoryAllocateInfoNV
  {
    // The equivalent C API struct; wrapper and C struct are used interchangeably via the casts below.
    using NativeType = VkExportMemoryAllocateInfoNV;

    // NOTE(review): allowDuplicate presumably states whether this struct may appear more than once in a structure chain — confirm against the chain validation code.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExportMemoryAllocateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all parameters are defaulted, so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , handleTypes{ handleTypes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the wrapper; relies on both having identical layout.
    ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExportMemoryAllocateInfoNV( *reinterpret_cast<ExportMemoryAllocateInfoNV const *>( &rhs ) )
    {
    }

    ExportMemoryAllocateInfoNV & operator=( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct, again relying on the shared layout.
    ExportMemoryAllocateInfoNV & operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExportMemoryAllocateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV & setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      handleTypes = handleTypes_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the C type (const/non-const reference and pointer), so the wrapper can be handed straight to the C API.
    operator VkExportMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExportMemoryAllocateInfoNV *>( this );
    }

    operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExportMemoryAllocateInfoNV *>( this );
    }

    operator VkExportMemoryAllocateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExportMemoryAllocateInfoNV *>( this );
    }

    operator VkExportMemoryAllocateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExportMemoryAllocateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used below to implement comparison.
    std::tuple<StructureType const &, const void * const &, ExternalMemoryHandleTypeFlagsNV const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, handleTypes );
    }
#endif

    // Defaulted three-way comparison when available; otherwise member-wise == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExportMemoryAllocateInfoNV const & ) const = default;
#else
    bool operator==( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
#  endif
    }

    bool operator!=( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct in the same order, so the reinterpret_casts above stay valid.
    StructureType                   sType       = StructureType::eExportMemoryAllocateInfoNV;
    const void *                    pNext       = {};
    ExternalMemoryHandleTypeFlagsNV handleTypes = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to this wrapper (C++20 and up).
  template <>
  struct CppType<VkExportMemoryAllocateInfoNV>
  {
    using Type = ExportMemoryAllocateInfoNV;
  };
#endif

  // Trait mapping the StructureType enumerant to this wrapper.
  template <>
  struct CppType<StructureType, StructureType::eExportMemoryAllocateInfoNV>
  {
    using Type = ExportMemoryAllocateInfoNV;
  };

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  // wrapper struct for struct VkExportMemoryWin32HandleInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMemoryWin32HandleInfoKHR.html
  struct ExportMemoryWin32HandleInfoKHR
  {
    // The equivalent C API struct; wrapper and C struct are used interchangeably via the casts below.
    using NativeType = VkExportMemoryWin32HandleInfoKHR;

    // NOTE(review): allowDuplicate presumably states whether this struct may appear more than once in a structure chain — confirm against the chain validation code.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExportMemoryWin32HandleInfoKHR;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all parameters are defaulted, so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {},
                                                         DWORD                       dwAccess_    = {},
                                                         LPCWSTR                     name_        = {},
                                                         const void *                pNext_       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pAttributes{ pAttributes_ }
      , dwAccess{ dwAccess_ }
      , name{ name_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the wrapper; relies on both having identical layout.
    ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExportMemoryWin32HandleInfoKHR( *reinterpret_cast<ExportMemoryWin32HandleInfoKHR const *>( &rhs ) )
    {
    }

    ExportMemoryWin32HandleInfoKHR & operator=( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct, again relying on the shared layout.
    ExportMemoryWin32HandleInfoKHR & operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExportMemoryWin32HandleInfoKHR const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
    {
      pAttributes = pAttributes_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      dwAccess = dwAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
    {
      name = name_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the C type (const/non-const reference and pointer), so the wrapper can be handed straight to the C API.
    operator VkExportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExportMemoryWin32HandleInfoKHR *>( this );
    }

    operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExportMemoryWin32HandleInfoKHR *>( this );
    }

    operator VkExportMemoryWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExportMemoryWin32HandleInfoKHR *>( this );
    }

    operator VkExportMemoryWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExportMemoryWin32HandleInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used below to implement comparison.
    std::tuple<StructureType const &, const void * const &, const SECURITY_ATTRIBUTES * const &, DWORD const &, LPCWSTR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pAttributes, dwAccess, name );
    }
#  endif

    // Defaulted three-way comparison when available; otherwise member-wise == / !=.
    // NOTE(review): `name` is an LPCWSTR, so == compares the pointers, not the wide-string contents.
#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExportMemoryWin32HandleInfoKHR const & ) const = default;
#  else
    bool operator==( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess ) && ( name == rhs.name );
#    endif
    }

    bool operator!=( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror the C struct in the same order, so the reinterpret_casts above stay valid.
    StructureType               sType       = StructureType::eExportMemoryWin32HandleInfoKHR;
    const void *                pNext       = {};
    const SECURITY_ATTRIBUTES * pAttributes = {};
    DWORD                       dwAccess    = {};
    LPCWSTR                     name        = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to this wrapper (C++20 and up).
  template <>
  struct CppType<VkExportMemoryWin32HandleInfoKHR>
  {
    using Type = ExportMemoryWin32HandleInfoKHR;
  };
#  endif

  // Trait mapping the StructureType enumerant to this wrapper.
  template <>
  struct CppType<StructureType, StructureType::eExportMemoryWin32HandleInfoKHR>
  {
    using Type = ExportMemoryWin32HandleInfoKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  // wrapper struct for struct VkExportMemoryWin32HandleInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMemoryWin32HandleInfoNV.html
  struct ExportMemoryWin32HandleInfoNV
  {
    // The equivalent C API struct; wrapper and C struct are used interchangeably via the casts below.
    using NativeType = VkExportMemoryWin32HandleInfoNV;

    // NOTE(review): allowDuplicate presumably states whether this struct may appear more than once in a structure chain — confirm against the chain validation code.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExportMemoryWin32HandleInfoNV;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all parameters are defaulted, so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR
      ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES * pAttributes_ = {}, DWORD dwAccess_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pAttributes{ pAttributes_ }
      , dwAccess{ dwAccess_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the wrapper; relies on both having identical layout.
    ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExportMemoryWin32HandleInfoNV( *reinterpret_cast<ExportMemoryWin32HandleInfoNV const *>( &rhs ) )
    {
    }

    ExportMemoryWin32HandleInfoNV & operator=( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct, again relying on the shared layout.
    ExportMemoryWin32HandleInfoNV & operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExportMemoryWin32HandleInfoNV const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
    {
      pAttributes = pAttributes_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      dwAccess = dwAccess_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the C type (const/non-const reference and pointer), so the wrapper can be handed straight to the C API.
    operator VkExportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExportMemoryWin32HandleInfoNV *>( this );
    }

    operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExportMemoryWin32HandleInfoNV *>( this );
    }

    operator VkExportMemoryWin32HandleInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExportMemoryWin32HandleInfoNV *>( this );
    }

    operator VkExportMemoryWin32HandleInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExportMemoryWin32HandleInfoNV *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used below to implement comparison.
    std::tuple<StructureType const &, const void * const &, const SECURITY_ATTRIBUTES * const &, DWORD const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pAttributes, dwAccess );
    }
#  endif

    // Defaulted three-way comparison when available; otherwise member-wise == / !=.
#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExportMemoryWin32HandleInfoNV const & ) const = default;
#  else
    bool operator==( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess );
#    endif
    }

    bool operator!=( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror the C struct in the same order, so the reinterpret_casts above stay valid.
    StructureType               sType       = StructureType::eExportMemoryWin32HandleInfoNV;
    const void *                pNext       = {};
    const SECURITY_ATTRIBUTES * pAttributes = {};
    DWORD                       dwAccess    = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to this wrapper (C++20 and up).
  template <>
  struct CppType<VkExportMemoryWin32HandleInfoNV>
  {
    using Type = ExportMemoryWin32HandleInfoNV;
  };
#  endif

  // Trait mapping the StructureType enumerant to this wrapper.
  template <>
  struct CppType<StructureType, StructureType::eExportMemoryWin32HandleInfoNV>
  {
    using Type = ExportMemoryWin32HandleInfoNV;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

#if defined( VK_USE_PLATFORM_METAL_EXT )
  // wrapper struct for struct VkExportMetalBufferInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalBufferInfoEXT.html
  struct ExportMetalBufferInfoEXT
  {
    // The equivalent C API struct; wrapper and C struct are used interchangeably via the casts below.
    using NativeType = VkExportMetalBufferInfoEXT;

    // NOTE(review): allowDuplicate presumably states whether this struct may appear more than once in a structure chain — confirm against the chain validation code.
    static const bool                                  allowDuplicate = true;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExportMetalBufferInfoEXT;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all parameters are defaulted, so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR ExportMetalBufferInfoEXT( DeviceMemory memory_ = {}, MTLBuffer_id mtlBuffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , memory{ memory_ }
      , mtlBuffer{ mtlBuffer_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExportMetalBufferInfoEXT( ExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the wrapper; relies on both having identical layout.
    ExportMetalBufferInfoEXT( VkExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExportMetalBufferInfoEXT( *reinterpret_cast<ExportMetalBufferInfoEXT const *>( &rhs ) )
    {
    }

    ExportMetalBufferInfoEXT & operator=( ExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct, again relying on the shared layout.
    ExportMetalBufferInfoEXT & operator=( VkExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExportMetalBufferInfoEXT const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setMemory( DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setMtlBuffer( MTLBuffer_id mtlBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      mtlBuffer = mtlBuffer_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the C type (const/non-const reference and pointer), so the wrapper can be handed straight to the C API.
    operator VkExportMetalBufferInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExportMetalBufferInfoEXT *>( this );
    }

    operator VkExportMetalBufferInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExportMetalBufferInfoEXT *>( this );
    }

    operator VkExportMetalBufferInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExportMetalBufferInfoEXT *>( this );
    }

    operator VkExportMetalBufferInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExportMetalBufferInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used below to implement comparison.
    std::tuple<StructureType const &, const void * const &, DeviceMemory const &, MTLBuffer_id const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memory, mtlBuffer );
    }
#  endif

    // Defaulted three-way comparison when available; otherwise member-wise == / !=.
#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExportMetalBufferInfoEXT const & ) const = default;
#  else
    bool operator==( ExportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( mtlBuffer == rhs.mtlBuffer );
#    endif
    }

    bool operator!=( ExportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror the C struct in the same order, so the reinterpret_casts above stay valid.
    StructureType sType     = StructureType::eExportMetalBufferInfoEXT;
    const void *  pNext     = {};
    DeviceMemory  memory    = {};
    MTLBuffer_id  mtlBuffer = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to this wrapper (C++20 and up).
  template <>
  struct CppType<VkExportMetalBufferInfoEXT>
  {
    using Type = ExportMetalBufferInfoEXT;
  };
#  endif

  // Trait mapping the StructureType enumerant to this wrapper.
  template <>
  struct CppType<StructureType, StructureType::eExportMetalBufferInfoEXT>
  {
    using Type = ExportMetalBufferInfoEXT;
  };
#endif /*VK_USE_PLATFORM_METAL_EXT*/

#if defined( VK_USE_PLATFORM_METAL_EXT )
  // wrapper struct for struct VkExportMetalCommandQueueInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalCommandQueueInfoEXT.html
  struct ExportMetalCommandQueueInfoEXT
  {
    // The equivalent C API struct; wrapper and C struct are used interchangeably via the casts below.
    using NativeType = VkExportMetalCommandQueueInfoEXT;

    // NOTE(review): allowDuplicate presumably states whether this struct may appear more than once in a structure chain — confirm against the chain validation code.
    static const bool                                  allowDuplicate = true;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExportMetalCommandQueueInfoEXT;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all parameters are defaulted, so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR
      ExportMetalCommandQueueInfoEXT( Queue queue_ = {}, MTLCommandQueue_id mtlCommandQueue_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , queue{ queue_ }
      , mtlCommandQueue{ mtlCommandQueue_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExportMetalCommandQueueInfoEXT( ExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the wrapper; relies on both having identical layout.
    ExportMetalCommandQueueInfoEXT( VkExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExportMetalCommandQueueInfoEXT( *reinterpret_cast<ExportMetalCommandQueueInfoEXT const *>( &rhs ) )
    {
    }

    ExportMetalCommandQueueInfoEXT & operator=( ExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct, again relying on the shared layout.
    ExportMetalCommandQueueInfoEXT & operator=( VkExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExportMetalCommandQueueInfoEXT const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setQueue( Queue queue_ ) VULKAN_HPP_NOEXCEPT
    {
      queue = queue_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setMtlCommandQueue( MTLCommandQueue_id mtlCommandQueue_ ) VULKAN_HPP_NOEXCEPT
    {
      mtlCommandQueue = mtlCommandQueue_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the C type (const/non-const reference and pointer), so the wrapper can be handed straight to the C API.
    operator VkExportMetalCommandQueueInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExportMetalCommandQueueInfoEXT *>( this );
    }

    operator VkExportMetalCommandQueueInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExportMetalCommandQueueInfoEXT *>( this );
    }

    operator VkExportMetalCommandQueueInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExportMetalCommandQueueInfoEXT *>( this );
    }

    operator VkExportMetalCommandQueueInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExportMetalCommandQueueInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used below to implement comparison.
    std::tuple<StructureType const &, const void * const &, Queue const &, MTLCommandQueue_id const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, queue, mtlCommandQueue );
    }
#  endif

    // Defaulted three-way comparison when available; otherwise member-wise == / !=.
#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExportMetalCommandQueueInfoEXT const & ) const = default;
#  else
    bool operator==( ExportMetalCommandQueueInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queue == rhs.queue ) && ( mtlCommandQueue == rhs.mtlCommandQueue );
#    endif
    }

    bool operator!=( ExportMetalCommandQueueInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror the C struct in the same order, so the reinterpret_casts above stay valid.
    StructureType      sType           = StructureType::eExportMetalCommandQueueInfoEXT;
    const void *       pNext           = {};
    Queue              queue           = {};
    MTLCommandQueue_id mtlCommandQueue = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to this wrapper (C++20 and up).
  template <>
  struct CppType<VkExportMetalCommandQueueInfoEXT>
  {
    using Type = ExportMetalCommandQueueInfoEXT;
  };
#  endif

  // Trait mapping the StructureType enumerant to this wrapper.
  template <>
  struct CppType<StructureType, StructureType::eExportMetalCommandQueueInfoEXT>
  {
    using Type = ExportMetalCommandQueueInfoEXT;
  };
#endif /*VK_USE_PLATFORM_METAL_EXT*/

#if defined( VK_USE_PLATFORM_METAL_EXT )
  // wrapper struct for struct VkExportMetalDeviceInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalDeviceInfoEXT.html
  struct ExportMetalDeviceInfoEXT
  {
    // The equivalent C API struct; wrapper and C struct are used interchangeably via the casts below.
    using NativeType = VkExportMetalDeviceInfoEXT;

    // NOTE(review): allowDuplicate presumably states whether this struct may appear more than once in a structure chain — confirm against the chain validation code.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExportMetalDeviceInfoEXT;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all parameters are defaulted, so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR ExportMetalDeviceInfoEXT( MTLDevice_id mtlDevice_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , mtlDevice{ mtlDevice_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExportMetalDeviceInfoEXT( ExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the wrapper; relies on both having identical layout.
    ExportMetalDeviceInfoEXT( VkExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExportMetalDeviceInfoEXT( *reinterpret_cast<ExportMetalDeviceInfoEXT const *>( &rhs ) )
    {
    }

    ExportMetalDeviceInfoEXT & operator=( ExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct, again relying on the shared layout.
    ExportMetalDeviceInfoEXT & operator=( VkExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExportMetalDeviceInfoEXT const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 ExportMetalDeviceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportMetalDeviceInfoEXT & setMtlDevice( MTLDevice_id mtlDevice_ ) VULKAN_HPP_NOEXCEPT
    {
      mtlDevice = mtlDevice_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the C type (const/non-const reference and pointer), so the wrapper can be handed straight to the C API.
    operator VkExportMetalDeviceInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExportMetalDeviceInfoEXT *>( this );
    }

    operator VkExportMetalDeviceInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExportMetalDeviceInfoEXT *>( this );
    }

    operator VkExportMetalDeviceInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExportMetalDeviceInfoEXT *>( this );
    }

    operator VkExportMetalDeviceInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExportMetalDeviceInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used below to implement comparison.
    std::tuple<StructureType const &, const void * const &, MTLDevice_id const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, mtlDevice );
    }
#  endif

    // Defaulted three-way comparison when available; otherwise member-wise == / !=.
#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExportMetalDeviceInfoEXT const & ) const = default;
#  else
    bool operator==( ExportMetalDeviceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mtlDevice == rhs.mtlDevice );
#    endif
    }

    bool operator!=( ExportMetalDeviceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror the C struct in the same order, so the reinterpret_casts above stay valid.
    StructureType sType     = StructureType::eExportMetalDeviceInfoEXT;
    const void *  pNext     = {};
    MTLDevice_id  mtlDevice = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to this wrapper (C++20 and up).
  template <>
  struct CppType<VkExportMetalDeviceInfoEXT>
  {
    using Type = ExportMetalDeviceInfoEXT;
  };
#  endif

  // Trait mapping the StructureType enumerant to this wrapper.
  template <>
  struct CppType<StructureType, StructureType::eExportMetalDeviceInfoEXT>
  {
    using Type = ExportMetalDeviceInfoEXT;
  };
#endif /*VK_USE_PLATFORM_METAL_EXT*/

#if defined( VK_USE_PLATFORM_METAL_EXT )
  // wrapper struct for struct VkExportMetalIOSurfaceInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalIOSurfaceInfoEXT.html
  struct ExportMetalIOSurfaceInfoEXT
  {
    // The equivalent C API struct; wrapper and C struct are used interchangeably via the casts below.
    using NativeType = VkExportMetalIOSurfaceInfoEXT;

    // NOTE(review): allowDuplicate presumably states whether this struct may appear more than once in a structure chain — confirm against the chain validation code.
    static const bool                                  allowDuplicate = true;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExportMetalIoSurfaceInfoEXT;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all parameters are defaulted, so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR ExportMetalIOSurfaceInfoEXT( Image image_ = {}, IOSurfaceRef ioSurface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , image{ image_ }
      , ioSurface{ ioSurface_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExportMetalIOSurfaceInfoEXT( ExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the wrapper; relies on both having identical layout.
    ExportMetalIOSurfaceInfoEXT( VkExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExportMetalIOSurfaceInfoEXT( *reinterpret_cast<ExportMetalIOSurfaceInfoEXT const *>( &rhs ) )
    {
    }

    ExportMetalIOSurfaceInfoEXT & operator=( ExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct, again relying on the shared layout.
    ExportMetalIOSurfaceInfoEXT & operator=( VkExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExportMetalIOSurfaceInfoEXT const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setImage( Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setIoSurface( IOSurfaceRef ioSurface_ ) VULKAN_HPP_NOEXCEPT
    {
      ioSurface = ioSurface_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the C type (const/non-const reference and pointer), so the wrapper can be handed straight to the C API.
    operator VkExportMetalIOSurfaceInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExportMetalIOSurfaceInfoEXT *>( this );
    }

    operator VkExportMetalIOSurfaceInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExportMetalIOSurfaceInfoEXT *>( this );
    }

    operator VkExportMetalIOSurfaceInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExportMetalIOSurfaceInfoEXT *>( this );
    }

    operator VkExportMetalIOSurfaceInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExportMetalIOSurfaceInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used below to implement comparison.
    std::tuple<StructureType const &, const void * const &, Image const &, IOSurfaceRef const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, image, ioSurface );
    }
#  endif

    // Defaulted three-way comparison when available; otherwise member-wise == / !=.
#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExportMetalIOSurfaceInfoEXT const & ) const = default;
#  else
    bool operator==( ExportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( ioSurface == rhs.ioSurface );
#    endif
    }

    bool operator!=( ExportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror the C struct in the same order, so the reinterpret_casts above stay valid.
    StructureType sType     = StructureType::eExportMetalIoSurfaceInfoEXT;
    const void *  pNext     = {};
    Image         image     = {};
    IOSurfaceRef  ioSurface = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to this wrapper (C++20 and up).
  template <>
  struct CppType<VkExportMetalIOSurfaceInfoEXT>
  {
    using Type = ExportMetalIOSurfaceInfoEXT;
  };
#  endif

  // Trait mapping the StructureType enumerant to this wrapper.
  template <>
  struct CppType<StructureType, StructureType::eExportMetalIoSurfaceInfoEXT>
  {
    using Type = ExportMetalIOSurfaceInfoEXT;
  };
#endif /*VK_USE_PLATFORM_METAL_EXT*/

#if defined( VK_USE_PLATFORM_METAL_EXT )
  // wrapper struct for struct VkExportMetalObjectCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalObjectCreateInfoEXT.html
  struct ExportMetalObjectCreateInfoEXT
  {
    // The equivalent C API struct; wrapper and C struct are used interchangeably via the casts below.
    using NativeType = VkExportMetalObjectCreateInfoEXT;

    // NOTE(review): allowDuplicate presumably states whether this struct may appear more than once in a structure chain — confirm against the chain validation code.
    static const bool                                  allowDuplicate = true;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExportMetalObjectCreateInfoEXT;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; note the non-zero default of eMetalDevice for exportObjectType.
    VULKAN_HPP_CONSTEXPR ExportMetalObjectCreateInfoEXT( ExportMetalObjectTypeFlagBitsEXT exportObjectType_ = ExportMetalObjectTypeFlagBitsEXT::eMetalDevice,
                                                         const void *                     pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , exportObjectType{ exportObjectType_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExportMetalObjectCreateInfoEXT( ExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the wrapper; relies on both having identical layout.
    ExportMetalObjectCreateInfoEXT( VkExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExportMetalObjectCreateInfoEXT( *reinterpret_cast<ExportMetalObjectCreateInfoEXT const *>( &rhs ) )
    {
    }

    ExportMetalObjectCreateInfoEXT & operator=( ExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct, again relying on the shared layout.
    ExportMetalObjectCreateInfoEXT & operator=( VkExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExportMetalObjectCreateInfoEXT const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectCreateInfoEXT & setExportObjectType( ExportMetalObjectTypeFlagBitsEXT exportObjectType_ ) VULKAN_HPP_NOEXCEPT
    {
      exportObjectType = exportObjectType_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the C type (const/non-const reference and pointer), so the wrapper can be handed straight to the C API.
    operator VkExportMetalObjectCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExportMetalObjectCreateInfoEXT *>( this );
    }

    operator VkExportMetalObjectCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExportMetalObjectCreateInfoEXT *>( this );
    }

    operator VkExportMetalObjectCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExportMetalObjectCreateInfoEXT *>( this );
    }

    operator VkExportMetalObjectCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExportMetalObjectCreateInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used below to implement comparison.
    std::tuple<StructureType const &, const void * const &, ExportMetalObjectTypeFlagBitsEXT const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, exportObjectType );
    }
#  endif

    // Defaulted three-way comparison when available; otherwise member-wise == / !=.
#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExportMetalObjectCreateInfoEXT const & ) const = default;
#  else
    bool operator==( ExportMetalObjectCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exportObjectType == rhs.exportObjectType );
#    endif
    }

    bool operator!=( ExportMetalObjectCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror the C struct in the same order, so the reinterpret_casts above stay valid.
    StructureType                    sType            = StructureType::eExportMetalObjectCreateInfoEXT;
    const void *                     pNext            = {};
    ExportMetalObjectTypeFlagBitsEXT exportObjectType = ExportMetalObjectTypeFlagBitsEXT::eMetalDevice;
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to this wrapper (C++20 and up).
  template <>
  struct CppType<VkExportMetalObjectCreateInfoEXT>
  {
    using Type = ExportMetalObjectCreateInfoEXT;
  };
#  endif

  // Trait mapping the StructureType enumerant to this wrapper.
  template <>
  struct CppType<StructureType, StructureType::eExportMetalObjectCreateInfoEXT>
  {
    using Type = ExportMetalObjectCreateInfoEXT;
  };
#endif /*VK_USE_PLATFORM_METAL_EXT*/

#if defined( VK_USE_PLATFORM_METAL_EXT )
  // wrapper struct for struct VkExportMetalObjectsInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalObjectsInfoEXT.html
  struct ExportMetalObjectsInfoEXT
  {
    using NativeType = VkExportMetalObjectsInfoEXT;

    // allowDuplicate == false: at most one of these structs may appear in a pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExportMetalObjectsInfoEXT;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExportMetalObjectsInfoEXT( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } {}

    VULKAN_HPP_CONSTEXPR ExportMetalObjectsInfoEXT( ExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native Vulkan struct; relies on the wrapper sharing the native struct's layout
    ExportMetalObjectsInfoEXT( VkExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExportMetalObjectsInfoEXT( *reinterpret_cast<ExportMetalObjectsInfoEXT const *>( &rhs ) )
    {
    }

    ExportMetalObjectsInfoEXT & operator=( ExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native Vulkan struct (same layout assumption as the converting constructor)
    ExportMetalObjectsInfoEXT & operator=( VkExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExportMetalObjectsInfoEXT const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setter, returning *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native Vulkan type, so the wrapper can be passed directly to the C API
    operator VkExportMetalObjectsInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExportMetalObjectsInfoEXT *>( this );
    }

    operator VkExportMetalObjectsInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExportMetalObjectsInfoEXT *>( this );
    }

    operator VkExportMetalObjectsInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExportMetalObjectsInfoEXT *>( this );
    }

    operator VkExportMetalObjectsInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExportMetalObjectsInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext );
    }
#  endif

    // memberwise equality; note that pNext is compared as a raw pointer, not deep-compared
#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExportMetalObjectsInfoEXT const & ) const = default;
#  else
    bool operator==( ExportMetalObjectsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
#    endif
    }

    bool operator!=( ExportMetalObjectsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // members mirror the native struct, in declaration order
    StructureType sType = StructureType::eExportMetalObjectsInfoEXT;
    const void *  pNext = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native VkExportMetalObjectsInfoEXT type to its C++ wrapper (C++20 and newer only)
  template <>
  struct CppType<VkExportMetalObjectsInfoEXT>
  {
    using Type = ExportMetalObjectsInfoEXT;
  };
#  endif

  // maps StructureType::eExportMetalObjectsInfoEXT to the wrapper struct carrying that sType
  template <>
  struct CppType<StructureType, StructureType::eExportMetalObjectsInfoEXT>
  {
    using Type = ExportMetalObjectsInfoEXT;
  };
#endif /*VK_USE_PLATFORM_METAL_EXT*/

#if defined( VK_USE_PLATFORM_METAL_EXT )
  // wrapper struct for struct VkExportMetalSharedEventInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalSharedEventInfoEXT.html
  struct ExportMetalSharedEventInfoEXT
  {
    using NativeType = VkExportMetalSharedEventInfoEXT;

    // allowDuplicate == true: this struct may appear more than once in a pNext chain
    static const bool                                  allowDuplicate = true;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExportMetalSharedEventInfoEXT;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExportMetalSharedEventInfoEXT( Semaphore         semaphore_      = {},
                                                        Event             event_          = {},
                                                        MTLSharedEvent_id mtlSharedEvent_ = {},
                                                        const void *      pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , semaphore{ semaphore_ }
      , event{ event_ }
      , mtlSharedEvent{ mtlSharedEvent_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExportMetalSharedEventInfoEXT( ExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native Vulkan struct; relies on the wrapper sharing the native struct's layout
    ExportMetalSharedEventInfoEXT( VkExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExportMetalSharedEventInfoEXT( *reinterpret_cast<ExportMetalSharedEventInfoEXT const *>( &rhs ) )
    {
    }

    ExportMetalSharedEventInfoEXT & operator=( ExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native Vulkan struct (same layout assumption as the converting constructor)
    ExportMetalSharedEventInfoEXT & operator=( VkExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExportMetalSharedEventInfoEXT const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters, returning *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setSemaphore( Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphore = semaphore_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setEvent( Event event_ ) VULKAN_HPP_NOEXCEPT
    {
      event = event_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setMtlSharedEvent( MTLSharedEvent_id mtlSharedEvent_ ) VULKAN_HPP_NOEXCEPT
    {
      mtlSharedEvent = mtlSharedEvent_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native Vulkan type, so the wrapper can be passed directly to the C API
    operator VkExportMetalSharedEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExportMetalSharedEventInfoEXT *>( this );
    }

    operator VkExportMetalSharedEventInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExportMetalSharedEventInfoEXT *>( this );
    }

    operator VkExportMetalSharedEventInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExportMetalSharedEventInfoEXT *>( this );
    }

    operator VkExportMetalSharedEventInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExportMetalSharedEventInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, Semaphore const &, Event const &, MTLSharedEvent_id const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, semaphore, event, mtlSharedEvent );
    }
#  endif

    // memberwise equality; pNext and mtlSharedEvent are compared as raw pointer/handle values
#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExportMetalSharedEventInfoEXT const & ) const = default;
#  else
    bool operator==( ExportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( event == rhs.event ) &&
             ( mtlSharedEvent == rhs.mtlSharedEvent );
#    endif
    }

    bool operator!=( ExportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // members mirror the native struct, in declaration order
    StructureType     sType          = StructureType::eExportMetalSharedEventInfoEXT;
    const void *      pNext          = {};
    Semaphore         semaphore      = {};
    Event             event          = {};
    MTLSharedEvent_id mtlSharedEvent = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native VkExportMetalSharedEventInfoEXT type to its C++ wrapper (C++20 and newer only)
  template <>
  struct CppType<VkExportMetalSharedEventInfoEXT>
  {
    using Type = ExportMetalSharedEventInfoEXT;
  };
#  endif

  // maps StructureType::eExportMetalSharedEventInfoEXT to the wrapper struct carrying that sType
  template <>
  struct CppType<StructureType, StructureType::eExportMetalSharedEventInfoEXT>
  {
    using Type = ExportMetalSharedEventInfoEXT;
  };
#endif /*VK_USE_PLATFORM_METAL_EXT*/

#if defined( VK_USE_PLATFORM_METAL_EXT )
  // wrapper struct for struct VkExportMetalTextureInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalTextureInfoEXT.html
  struct ExportMetalTextureInfoEXT
  {
    using NativeType = VkExportMetalTextureInfoEXT;

    // allowDuplicate == true: this struct may appear more than once in a pNext chain
    static const bool                                  allowDuplicate = true;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExportMetalTextureInfoEXT;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExportMetalTextureInfoEXT( Image               image_      = {},
                                                    ImageView           imageView_  = {},
                                                    BufferView          bufferView_ = {},
                                                    ImageAspectFlagBits plane_      = ImageAspectFlagBits::eColor,
                                                    MTLTexture_id       mtlTexture_ = {},
                                                    const void *        pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , image{ image_ }
      , imageView{ imageView_ }
      , bufferView{ bufferView_ }
      , plane{ plane_ }
      , mtlTexture{ mtlTexture_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExportMetalTextureInfoEXT( ExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native Vulkan struct; relies on the wrapper sharing the native struct's layout
    ExportMetalTextureInfoEXT( VkExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExportMetalTextureInfoEXT( *reinterpret_cast<ExportMetalTextureInfoEXT const *>( &rhs ) )
    {
    }

    ExportMetalTextureInfoEXT & operator=( ExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native Vulkan struct (same layout assumption as the converting constructor)
    ExportMetalTextureInfoEXT & operator=( VkExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExportMetalTextureInfoEXT const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters, returning *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setImage( Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setImageView( ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
    {
      imageView = imageView_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setBufferView( BufferView bufferView_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferView = bufferView_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setPlane( ImageAspectFlagBits plane_ ) VULKAN_HPP_NOEXCEPT
    {
      plane = plane_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setMtlTexture( MTLTexture_id mtlTexture_ ) VULKAN_HPP_NOEXCEPT
    {
      mtlTexture = mtlTexture_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native Vulkan type, so the wrapper can be passed directly to the C API
    operator VkExportMetalTextureInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExportMetalTextureInfoEXT *>( this );
    }

    operator VkExportMetalTextureInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExportMetalTextureInfoEXT *>( this );
    }

    operator VkExportMetalTextureInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExportMetalTextureInfoEXT *>( this );
    }

    operator VkExportMetalTextureInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExportMetalTextureInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &,
               const void * const &,
               Image const &,
               ImageView const &,
               BufferView const &,
               ImageAspectFlagBits const &,
               MTLTexture_id const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, image, imageView, bufferView, plane, mtlTexture );
    }
#  endif

    // memberwise equality; pNext and mtlTexture are compared as raw pointer/handle values
#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExportMetalTextureInfoEXT const & ) const = default;
#  else
    bool operator==( ExportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( imageView == rhs.imageView ) && ( bufferView == rhs.bufferView ) &&
             ( plane == rhs.plane ) && ( mtlTexture == rhs.mtlTexture );
#    endif
    }

    bool operator!=( ExportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // members mirror the native struct, in declaration order
    StructureType       sType      = StructureType::eExportMetalTextureInfoEXT;
    const void *        pNext      = {};
    Image               image      = {};
    ImageView           imageView  = {};
    BufferView          bufferView = {};
    ImageAspectFlagBits plane      = ImageAspectFlagBits::eColor;
    MTLTexture_id       mtlTexture = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native VkExportMetalTextureInfoEXT type to its C++ wrapper (C++20 and newer only)
  template <>
  struct CppType<VkExportMetalTextureInfoEXT>
  {
    using Type = ExportMetalTextureInfoEXT;
  };
#  endif

  // maps StructureType::eExportMetalTextureInfoEXT to the wrapper struct carrying that sType
  template <>
  struct CppType<StructureType, StructureType::eExportMetalTextureInfoEXT>
  {
    using Type = ExportMetalTextureInfoEXT;
  };
#endif /*VK_USE_PLATFORM_METAL_EXT*/

  // wrapper struct for struct VkExportSemaphoreCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportSemaphoreCreateInfo.html
  struct ExportSemaphoreCreateInfo
  {
    using NativeType = VkExportSemaphoreCreateInfo;

    // at most one of these structs may appear in a pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExportSemaphoreCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor: takes the exportable handle types and an optional chain pointer
    VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( ExternalSemaphoreHandleTypeFlags types_ = {}, const void * next_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ next_ }
      , handleTypes{ types_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native struct; the wrapper mirrors the native layout, so a reinterpreted view is used
    ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExportSemaphoreCreateInfo( *reinterpret_cast<ExportSemaphoreCreateInfo const *>( &rhs ) )
    {
    }

    ExportSemaphoreCreateInfo & operator=( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assignment from the native struct goes through the copy assignment via a reinterpreted view
    ExportSemaphoreCreateInfo & operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      auto const & wrapped = *reinterpret_cast<ExportSemaphoreCreateInfo const *>( &rhs );
      *this                = wrapped;
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters, returning *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo & setPNext( const void * next_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = next_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo & setHandleTypes( ExternalSemaphoreHandleTypeFlags types_ ) VULKAN_HPP_NOEXCEPT
    {
      handleTypes = types_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native Vulkan type, so the wrapper can be handed straight to the C API
    operator VkExportSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExportSemaphoreCreateInfo *>( this );
    }

    operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExportSemaphoreCreateInfo *>( this );
    }

    operator VkExportSemaphoreCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExportSemaphoreCreateInfo *>( this );
    }

    operator VkExportSemaphoreCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExportSemaphoreCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, ExternalSemaphoreHandleTypeFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, handleTypes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExportSemaphoreCreateInfo const & ) const = default;
#else
    // memberwise equality; pNext is compared as a raw pointer, not deep-compared
    bool operator==( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( handleTypes == rhs.handleTypes ) && ( pNext == rhs.pNext ) && ( sType == rhs.sType );
#  endif
    }

    bool operator!=( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    // members mirror the native struct, in declaration order
    StructureType                    sType       = StructureType::eExportSemaphoreCreateInfo;
    const void *                     pNext       = {};
    ExternalSemaphoreHandleTypeFlags handleTypes = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native VkExportSemaphoreCreateInfo type to its C++ wrapper (C++20 and newer only)
  template <>
  struct CppType<VkExportSemaphoreCreateInfo>
  {
    using Type = ExportSemaphoreCreateInfo;
  };
#endif

  // maps StructureType::eExportSemaphoreCreateInfo to the wrapper struct carrying that sType
  template <>
  struct CppType<StructureType, StructureType::eExportSemaphoreCreateInfo>
  {
    using Type = ExportSemaphoreCreateInfo;
  };

  // alias for the struct as originally introduced by VK_KHR_external_semaphore
  using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo;

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  // wrapper struct for struct VkExportSemaphoreWin32HandleInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportSemaphoreWin32HandleInfoKHR.html
  struct ExportSemaphoreWin32HandleInfoKHR
  {
    using NativeType = VkExportSemaphoreWin32HandleInfoKHR;

    // allowDuplicate == false: at most one of these structs may appear in a pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExportSemaphoreWin32HandleInfoKHR;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {},
                                                            DWORD                       dwAccess_    = {},
                                                            LPCWSTR                     name_        = {},
                                                            const void *                pNext_       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pAttributes{ pAttributes_ }
      , dwAccess{ dwAccess_ }
      , name{ name_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native Vulkan struct; relies on the wrapper sharing the native struct's layout
    ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExportSemaphoreWin32HandleInfoKHR( *reinterpret_cast<ExportSemaphoreWin32HandleInfoKHR const *>( &rhs ) )
    {
    }

    ExportSemaphoreWin32HandleInfoKHR & operator=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native Vulkan struct (same layout assumption as the converting constructor)
    ExportSemaphoreWin32HandleInfoKHR & operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExportSemaphoreWin32HandleInfoKHR const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters, returning *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
    {
      pAttributes = pAttributes_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      dwAccess = dwAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
    {
      name = name_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native Vulkan type, so the wrapper can be passed directly to the C API
    operator VkExportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExportSemaphoreWin32HandleInfoKHR *>( this );
    }

    operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExportSemaphoreWin32HandleInfoKHR *>( this );
    }

    operator VkExportSemaphoreWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExportSemaphoreWin32HandleInfoKHR *>( this );
    }

    operator VkExportSemaphoreWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExportSemaphoreWin32HandleInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, const SECURITY_ATTRIBUTES * const &, DWORD const &, LPCWSTR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pAttributes, dwAccess, name );
    }
#  endif

    // memberwise equality; NOTE: name (LPCWSTR) and pAttributes are compared by pointer value, not by pointed-to contents
#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExportSemaphoreWin32HandleInfoKHR const & ) const = default;
#  else
    bool operator==( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess ) && ( name == rhs.name );
#    endif
    }

    bool operator!=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // members mirror the native struct, in declaration order
    StructureType               sType       = StructureType::eExportSemaphoreWin32HandleInfoKHR;
    const void *                pNext       = {};
    const SECURITY_ATTRIBUTES * pAttributes = {};
    DWORD                       dwAccess    = {};
    LPCWSTR                     name        = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native VkExportSemaphoreWin32HandleInfoKHR type to its C++ wrapper (C++20 and newer only)
  template <>
  struct CppType<VkExportSemaphoreWin32HandleInfoKHR>
  {
    using Type = ExportSemaphoreWin32HandleInfoKHR;
  };
#  endif

  // maps StructureType::eExportSemaphoreWin32HandleInfoKHR to the wrapper struct carrying that sType
  template <>
  struct CppType<StructureType, StructureType::eExportSemaphoreWin32HandleInfoKHR>
  {
    using Type = ExportSemaphoreWin32HandleInfoKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

  // wrapper struct for struct VkExtensionProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExtensionProperties.html
  struct ExtensionProperties
  {
    using NativeType = VkExtensionProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // CONSTEXPR_14 (not CONSTEXPR): copying the char array needs relaxed-constexpr loops
    VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & extensionName_ = {},
                                                 uint32_t                                             specVersion_   = {} ) VULKAN_HPP_NOEXCEPT
      : extensionName{ extensionName_ }
      , specVersion{ specVersion_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native Vulkan struct; relies on the wrapper sharing the native struct's layout
    ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT : ExtensionProperties( *reinterpret_cast<ExtensionProperties const *>( &rhs ) )
    {
    }

    ExtensionProperties & operator=( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native Vulkan struct (same layout assumption as the converting constructor)
    ExtensionProperties & operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExtensionProperties const *>( &rhs );
      return *this;
    }

    // implicit conversions to the native Vulkan type, so the wrapper can be passed directly to the C API
    operator VkExtensionProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExtensionProperties *>( this );
    }

    operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExtensionProperties *>( this );
    }

    operator VkExtensionProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExtensionProperties *>( this );
    }

    operator VkExtensionProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExtensionProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( extensionName, specVersion );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // hand-written ordering (not defaulted): extensionName is compared as a C string via strcmp,
    // so bytes after the terminating NUL do not affect the result; ties are broken by specVersion
    std::strong_ordering operator<=>( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = strcmp( extensionName, rhs.extensionName ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = specVersion <=> rhs.specVersion; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    // equality also uses strcmp, consistent with operator<=> above
    bool operator==( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( strcmp( extensionName, rhs.extensionName ) == 0 ) && ( specVersion == rhs.specVersion );
    }

    bool operator!=( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // members mirror the native struct, in declaration order
    ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> extensionName = {};
    uint32_t                                         specVersion   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native VkExtensionProperties type to its C++ wrapper (C++20 and newer only)
  template <>
  struct CppType<VkExtensionProperties>
  {
    using Type = ExtensionProperties;
  };
#endif

  // wrapper struct for struct VkExternalMemoryProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryProperties.html
  struct ExternalMemoryProperties
  {
    using NativeType = VkExternalMemoryProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor: feature flags plus the export/compatibility handle-type sets
    VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( ExternalMemoryFeatureFlags    features_    = {},
                                                   ExternalMemoryHandleTypeFlags exportFrom_  = {},
                                                   ExternalMemoryHandleTypeFlags compatible_  = {} ) VULKAN_HPP_NOEXCEPT
      : externalMemoryFeatures{ features_ }
      , exportFromImportedHandleTypes{ exportFrom_ }
      , compatibleHandleTypes{ compatible_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native struct; the wrapper mirrors the native layout, so a reinterpreted view is used
    ExternalMemoryProperties( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalMemoryProperties( *reinterpret_cast<ExternalMemoryProperties const *>( &rhs ) )
    {
    }

    ExternalMemoryProperties & operator=( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assignment from the native struct goes through the copy assignment via a reinterpreted view
    ExternalMemoryProperties & operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      auto const & wrapped = *reinterpret_cast<ExternalMemoryProperties const *>( &rhs );
      *this                = wrapped;
      return *this;
    }

    // implicit conversions to the native Vulkan type, so the wrapper can be handed straight to the C API
    operator VkExternalMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalMemoryProperties *>( this );
    }

    operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalMemoryProperties *>( this );
    }

    operator VkExternalMemoryProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExternalMemoryProperties *>( this );
    }

    operator VkExternalMemoryProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExternalMemoryProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<ExternalMemoryFeatureFlags const &, ExternalMemoryHandleTypeFlags const &, ExternalMemoryHandleTypeFlags const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( externalMemoryFeatures, exportFromImportedHandleTypes, compatibleHandleTypes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalMemoryProperties const & ) const = default;
#else
    // memberwise equality over the three flag sets
    bool operator==( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( compatibleHandleTypes == rhs.compatibleHandleTypes ) && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) &&
             ( externalMemoryFeatures == rhs.externalMemoryFeatures );
#  endif
    }

    bool operator!=( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    // members mirror the native struct, in declaration order
    ExternalMemoryFeatureFlags    externalMemoryFeatures        = {};
    ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {};
    ExternalMemoryHandleTypeFlags compatibleHandleTypes         = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native VkExternalMemoryProperties type to its C++ wrapper (C++20 and newer only)
  template <>
  struct CppType<VkExternalMemoryProperties>
  {
    using Type = ExternalMemoryProperties;
  };
#endif
  // alias for the struct as originally introduced by VK_KHR_external_memory_capabilities
  using ExternalMemoryPropertiesKHR = ExternalMemoryProperties;

  // wrapper struct for struct VkExternalBufferProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalBufferProperties.html
  struct ExternalBufferProperties
  {
    using NativeType = VkExternalBufferProperties;

    // allowDuplicate == false: at most one of these structs may appear in a pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExternalBufferProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // note: unlike the Export*Info structs above, pNext is a non-const void *
    VULKAN_HPP_CONSTEXPR ExternalBufferProperties( ExternalMemoryProperties externalMemoryProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , externalMemoryProperties{ externalMemoryProperties_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExternalBufferProperties( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native Vulkan struct; relies on the wrapper sharing the native struct's layout
    ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalBufferProperties( *reinterpret_cast<ExternalBufferProperties const *>( &rhs ) )
    {
    }

    ExternalBufferProperties & operator=( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native Vulkan struct (same layout assumption as the converting constructor)
    ExternalBufferProperties & operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExternalBufferProperties const *>( &rhs );
      return *this;
    }

    // implicit conversions to the native Vulkan type, so the wrapper can be passed directly to the C API
    operator VkExternalBufferProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalBufferProperties *>( this );
    }

    operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalBufferProperties *>( this );
    }

    operator VkExternalBufferProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExternalBufferProperties *>( this );
    }

    operator VkExternalBufferProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExternalBufferProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, ExternalMemoryProperties const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, externalMemoryProperties );
    }
#endif

    // memberwise equality; externalMemoryProperties is compared via its own operator==, pNext as a raw pointer
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalBufferProperties const & ) const = default;
#else
    bool operator==( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryProperties == rhs.externalMemoryProperties );
#  endif
    }

    bool operator!=( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members mirror the native struct, in declaration order
    StructureType            sType                    = StructureType::eExternalBufferProperties;
    void *                   pNext                    = {};
    ExternalMemoryProperties externalMemoryProperties = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native VkExternalBufferProperties type to its C++ wrapper (C++20 and newer only)
  template <>
  struct CppType<VkExternalBufferProperties>
  {
    using Type = ExternalBufferProperties;
  };
#endif

  // maps StructureType::eExternalBufferProperties to the wrapper struct carrying that sType
  template <>
  struct CppType<StructureType, StructureType::eExternalBufferProperties>
  {
    using Type = ExternalBufferProperties;
  };

  // alias for the struct as originally introduced by VK_KHR_external_memory_capabilities
  using ExternalBufferPropertiesKHR = ExternalBufferProperties;

  // wrapper struct for struct VkExternalComputeQueueCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalComputeQueueCreateInfoNV.html
  struct ExternalComputeQueueCreateInfoNV
  {
    using NativeType = VkExternalComputeQueueCreateInfoNV;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExternalComputeQueueCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExternalComputeQueueCreateInfoNV( Queue preferredQueue_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , preferredQueue{ preferredQueue_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExternalComputeQueueCreateInfoNV( ExternalComputeQueueCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because this wrapper is layout-compatible with it.
    ExternalComputeQueueCreateInfoNV( VkExternalComputeQueueCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalComputeQueueCreateInfoNV( *reinterpret_cast<ExternalComputeQueueCreateInfoNV const *>( &rhs ) )
    {
    }

    ExternalComputeQueueCreateInfoNV & operator=( ExternalComputeQueueCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    ExternalComputeQueueCreateInfoNV & operator=( VkExternalComputeQueueCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExternalComputeQueueCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueCreateInfoNV & setPreferredQueue( Queue preferredQueue_ ) VULKAN_HPP_NOEXCEPT
    {
      preferredQueue = preferredQueue_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type for passing directly to the C API.
    operator VkExternalComputeQueueCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalComputeQueueCreateInfoNV *>( this );
    }

    operator VkExternalComputeQueueCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalComputeQueueCreateInfoNV *>( this );
    }

    operator VkExternalComputeQueueCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExternalComputeQueueCreateInfoNV *>( this );
    }

    operator VkExternalComputeQueueCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExternalComputeQueueCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; used below for generic comparison.
    std::tuple<StructureType const &, const void * const &, Queue const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, preferredQueue );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalComputeQueueCreateInfoNV const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( ExternalComputeQueueCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( preferredQueue == rhs.preferredQueue );
#  endif
    }

    bool operator!=( ExternalComputeQueueCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkExternalComputeQueueCreateInfoNV exactly.
    StructureType sType          = StructureType::eExternalComputeQueueCreateInfoNV;
    const void *  pNext          = {};
    Queue         preferredQueue = {};
  };

  // Maps the native C type to its C++ wrapper (C++20 and up).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkExternalComputeQueueCreateInfoNV>
  {
    using Type = ExternalComputeQueueCreateInfoNV;
  };
#endif

  // Maps the StructureType enum value to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eExternalComputeQueueCreateInfoNV>
  {
    using Type = ExternalComputeQueueCreateInfoNV;
  };

  // wrapper struct for struct VkExternalComputeQueueDataParamsNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalComputeQueueDataParamsNV.html
  struct ExternalComputeQueueDataParamsNV
  {
    using NativeType = VkExternalComputeQueueDataParamsNV;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExternalComputeQueueDataParamsNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExternalComputeQueueDataParamsNV( uint32_t deviceIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , deviceIndex{ deviceIndex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExternalComputeQueueDataParamsNV( ExternalComputeQueueDataParamsNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because this wrapper is layout-compatible with it.
    ExternalComputeQueueDataParamsNV( VkExternalComputeQueueDataParamsNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalComputeQueueDataParamsNV( *reinterpret_cast<ExternalComputeQueueDataParamsNV const *>( &rhs ) )
    {
    }

    ExternalComputeQueueDataParamsNV & operator=( ExternalComputeQueueDataParamsNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    ExternalComputeQueueDataParamsNV & operator=( VkExternalComputeQueueDataParamsNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExternalComputeQueueDataParamsNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueDataParamsNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueDataParamsNV & setDeviceIndex( uint32_t deviceIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceIndex = deviceIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type for passing directly to the C API.
    operator VkExternalComputeQueueDataParamsNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalComputeQueueDataParamsNV *>( this );
    }

    operator VkExternalComputeQueueDataParamsNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalComputeQueueDataParamsNV *>( this );
    }

    operator VkExternalComputeQueueDataParamsNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExternalComputeQueueDataParamsNV *>( this );
    }

    operator VkExternalComputeQueueDataParamsNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExternalComputeQueueDataParamsNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; used below for generic comparison.
    std::tuple<StructureType const &, const void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, deviceIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalComputeQueueDataParamsNV const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( ExternalComputeQueueDataParamsNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndex == rhs.deviceIndex );
#  endif
    }

    bool operator!=( ExternalComputeQueueDataParamsNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkExternalComputeQueueDataParamsNV exactly.
    StructureType sType       = StructureType::eExternalComputeQueueDataParamsNV;
    const void *  pNext       = {};
    uint32_t      deviceIndex = {};
  };

  // Maps the native C type to its C++ wrapper (C++20 and up).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkExternalComputeQueueDataParamsNV>
  {
    using Type = ExternalComputeQueueDataParamsNV;
  };
#endif

  // Maps the StructureType enum value to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eExternalComputeQueueDataParamsNV>
  {
    using Type = ExternalComputeQueueDataParamsNV;
  };

  // wrapper struct for struct VkExternalComputeQueueDeviceCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalComputeQueueDeviceCreateInfoNV.html
  struct ExternalComputeQueueDeviceCreateInfoNV
  {
    using NativeType = VkExternalComputeQueueDeviceCreateInfoNV;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExternalComputeQueueDeviceCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExternalComputeQueueDeviceCreateInfoNV( uint32_t reservedExternalQueues_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , reservedExternalQueues{ reservedExternalQueues_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExternalComputeQueueDeviceCreateInfoNV( ExternalComputeQueueDeviceCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because this wrapper is layout-compatible with it.
    ExternalComputeQueueDeviceCreateInfoNV( VkExternalComputeQueueDeviceCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalComputeQueueDeviceCreateInfoNV( *reinterpret_cast<ExternalComputeQueueDeviceCreateInfoNV const *>( &rhs ) )
    {
    }

    ExternalComputeQueueDeviceCreateInfoNV & operator=( ExternalComputeQueueDeviceCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    ExternalComputeQueueDeviceCreateInfoNV & operator=( VkExternalComputeQueueDeviceCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExternalComputeQueueDeviceCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueDeviceCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueDeviceCreateInfoNV & setReservedExternalQueues( uint32_t reservedExternalQueues_ ) VULKAN_HPP_NOEXCEPT
    {
      reservedExternalQueues = reservedExternalQueues_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type for passing directly to the C API.
    operator VkExternalComputeQueueDeviceCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalComputeQueueDeviceCreateInfoNV *>( this );
    }

    operator VkExternalComputeQueueDeviceCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalComputeQueueDeviceCreateInfoNV *>( this );
    }

    operator VkExternalComputeQueueDeviceCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExternalComputeQueueDeviceCreateInfoNV *>( this );
    }

    operator VkExternalComputeQueueDeviceCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExternalComputeQueueDeviceCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; used below for generic comparison.
    std::tuple<StructureType const &, const void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, reservedExternalQueues );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalComputeQueueDeviceCreateInfoNV const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( ExternalComputeQueueDeviceCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( reservedExternalQueues == rhs.reservedExternalQueues );
#  endif
    }

    bool operator!=( ExternalComputeQueueDeviceCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkExternalComputeQueueDeviceCreateInfoNV exactly.
    StructureType sType                  = StructureType::eExternalComputeQueueDeviceCreateInfoNV;
    const void *  pNext                  = {};
    uint32_t      reservedExternalQueues = {};
  };

  // Maps the native C type to its C++ wrapper (C++20 and up).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkExternalComputeQueueDeviceCreateInfoNV>
  {
    using Type = ExternalComputeQueueDeviceCreateInfoNV;
  };
#endif

  // Maps the StructureType enum value to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eExternalComputeQueueDeviceCreateInfoNV>
  {
    using Type = ExternalComputeQueueDeviceCreateInfoNV;
  };

  // wrapper struct for struct VkExternalFenceProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalFenceProperties.html
  struct ExternalFenceProperties
  {
    using NativeType = VkExternalFenceProperties;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExternalFenceProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExternalFenceProperties( ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {},
                                                  ExternalFenceHandleTypeFlags compatibleHandleTypes_         = {},
                                                  ExternalFenceFeatureFlags    externalFenceFeatures_         = {},
                                                  void *                       pNext_                         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , exportFromImportedHandleTypes{ exportFromImportedHandleTypes_ }
      , compatibleHandleTypes{ compatibleHandleTypes_ }
      , externalFenceFeatures{ externalFenceFeatures_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExternalFenceProperties( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because this wrapper is layout-compatible with it.
    ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalFenceProperties( *reinterpret_cast<ExternalFenceProperties const *>( &rhs ) )
    {
    }

    ExternalFenceProperties & operator=( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    ExternalFenceProperties & operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExternalFenceProperties const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type for passing directly to the C API.
    operator VkExternalFenceProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalFenceProperties *>( this );
    }

    operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalFenceProperties *>( this );
    }

    operator VkExternalFenceProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExternalFenceProperties *>( this );
    }

    operator VkExternalFenceProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExternalFenceProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; used below for generic comparison.
    std::tuple<StructureType const &,
               void * const &,
               ExternalFenceHandleTypeFlags const &,
               ExternalFenceHandleTypeFlags const &,
               ExternalFenceFeatureFlags const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, exportFromImportedHandleTypes, compatibleHandleTypes, externalFenceFeatures );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalFenceProperties const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) &&
             ( compatibleHandleTypes == rhs.compatibleHandleTypes ) && ( externalFenceFeatures == rhs.externalFenceFeatures );
#  endif
    }

    bool operator!=( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkExternalFenceProperties exactly.
    StructureType                sType                         = StructureType::eExternalFenceProperties;
    void *                       pNext                         = {};
    ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {};
    ExternalFenceHandleTypeFlags compatibleHandleTypes         = {};
    ExternalFenceFeatureFlags    externalFenceFeatures         = {};
  };

  // Maps the native C type to its C++ wrapper (C++20 and up).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkExternalFenceProperties>
  {
    using Type = ExternalFenceProperties;
  };
#endif

  // Maps the StructureType enum value to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eExternalFenceProperties>
  {
    using Type = ExternalFenceProperties;
  };

  using ExternalFencePropertiesKHR = ExternalFenceProperties;

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  // wrapper struct for struct VkExternalFormatANDROID, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalFormatANDROID.html
  struct ExternalFormatANDROID
  {
    using NativeType = VkExternalFormatANDROID;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExternalFormatANDROID;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( uint64_t externalFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , externalFormat{ externalFormat_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because this wrapper is layout-compatible with it.
    ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalFormatANDROID( *reinterpret_cast<ExternalFormatANDROID const *>( &rhs ) )
    {
    }

    ExternalFormatANDROID & operator=( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    ExternalFormatANDROID & operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExternalFormatANDROID const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      externalFormat = externalFormat_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type for passing directly to the C API.
    operator VkExternalFormatANDROID const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalFormatANDROID *>( this );
    }

    operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalFormatANDROID *>( this );
    }

    operator VkExternalFormatANDROID const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExternalFormatANDROID *>( this );
    }

    operator VkExternalFormatANDROID *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExternalFormatANDROID *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; used below for generic comparison.
    std::tuple<StructureType const &, void * const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, externalFormat );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalFormatANDROID const & ) const = default;
#  else
    // Memberwise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormat == rhs.externalFormat );
#    endif
    }

    bool operator!=( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Member order and types mirror VkExternalFormatANDROID exactly.
    StructureType sType          = StructureType::eExternalFormatANDROID;
    void *        pNext          = {};
    uint64_t      externalFormat = {};
  };

  // Maps the native C type to its C++ wrapper (C++20 and up).
#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkExternalFormatANDROID>
  {
    using Type = ExternalFormatANDROID;
  };
#  endif

  // Maps the StructureType enum value to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eExternalFormatANDROID>
  {
    using Type = ExternalFormatANDROID;
  };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/

#if defined( VK_USE_PLATFORM_OHOS )
  // wrapper struct for struct VkExternalFormatOHOS, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalFormatOHOS.html
  struct ExternalFormatOHOS
  {
    using NativeType = VkExternalFormatOHOS;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExternalFormatOHOS;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExternalFormatOHOS( uint64_t externalFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , externalFormat{ externalFormat_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExternalFormatOHOS( ExternalFormatOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because this wrapper is layout-compatible with it.
    ExternalFormatOHOS( VkExternalFormatOHOS const & rhs ) VULKAN_HPP_NOEXCEPT : ExternalFormatOHOS( *reinterpret_cast<ExternalFormatOHOS const *>( &rhs ) ) {}

    ExternalFormatOHOS & operator=( ExternalFormatOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    ExternalFormatOHOS & operator=( VkExternalFormatOHOS const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExternalFormatOHOS const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ExternalFormatOHOS & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExternalFormatOHOS & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      externalFormat = externalFormat_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type for passing directly to the C API.
    operator VkExternalFormatOHOS const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalFormatOHOS *>( this );
    }

    operator VkExternalFormatOHOS &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalFormatOHOS *>( this );
    }

    operator VkExternalFormatOHOS const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExternalFormatOHOS *>( this );
    }

    operator VkExternalFormatOHOS *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExternalFormatOHOS *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; used below for generic comparison.
    std::tuple<StructureType const &, void * const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, externalFormat );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalFormatOHOS const & ) const = default;
#  else
    // Memberwise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( ExternalFormatOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormat == rhs.externalFormat );
#    endif
    }

    bool operator!=( ExternalFormatOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Member order and types mirror VkExternalFormatOHOS exactly.
    StructureType sType          = StructureType::eExternalFormatOHOS;
    void *        pNext          = {};
    uint64_t      externalFormat = {};
  };

  // Maps the native C type to its C++ wrapper (C++20 and up).
#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkExternalFormatOHOS>
  {
    using Type = ExternalFormatOHOS;
  };
#  endif

  // Maps the StructureType enum value to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eExternalFormatOHOS>
  {
    using Type = ExternalFormatOHOS;
  };
#endif /*VK_USE_PLATFORM_OHOS*/

#if defined( VK_USE_PLATFORM_SCREEN_QNX )
  // wrapper struct for struct VkExternalFormatQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalFormatQNX.html
  struct ExternalFormatQNX
  {
    using NativeType = VkExternalFormatQNX;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExternalFormatQNX;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExternalFormatQNX( uint64_t externalFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , externalFormat{ externalFormat_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExternalFormatQNX( ExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because this wrapper is layout-compatible with it.
    ExternalFormatQNX( VkExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT : ExternalFormatQNX( *reinterpret_cast<ExternalFormatQNX const *>( &rhs ) ) {}

    ExternalFormatQNX & operator=( ExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    ExternalFormatQNX & operator=( VkExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExternalFormatQNX const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ExternalFormatQNX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExternalFormatQNX & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      externalFormat = externalFormat_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type for passing directly to the C API.
    operator VkExternalFormatQNX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalFormatQNX *>( this );
    }

    operator VkExternalFormatQNX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalFormatQNX *>( this );
    }

    operator VkExternalFormatQNX const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExternalFormatQNX *>( this );
    }

    operator VkExternalFormatQNX *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExternalFormatQNX *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; used below for generic comparison.
    std::tuple<StructureType const &, void * const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, externalFormat );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalFormatQNX const & ) const = default;
#  else
    // Memberwise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( ExternalFormatQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormat == rhs.externalFormat );
#    endif
    }

    bool operator!=( ExternalFormatQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Member order and types mirror VkExternalFormatQNX exactly.
    StructureType sType          = StructureType::eExternalFormatQNX;
    void *        pNext          = {};
    uint64_t      externalFormat = {};
  };

  // Maps the native C type to its C++ wrapper (C++20 and up).
#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkExternalFormatQNX>
  {
    using Type = ExternalFormatQNX;
  };
#  endif

  // Maps the StructureType enum value to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eExternalFormatQNX>
  {
    using Type = ExternalFormatQNX;
  };
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/

  // wrapper struct for struct VkExternalImageFormatProperties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalImageFormatProperties.html
  struct ExternalImageFormatProperties
  {
    using NativeType = VkExternalImageFormatProperties;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExternalImageFormatProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( ExternalMemoryProperties externalMemoryProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , externalMemoryProperties{ externalMemoryProperties_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalImageFormatProperties( *reinterpret_cast<ExternalImageFormatProperties const *>( &rhs ) )
    {
    }

    ExternalImageFormatProperties & operator=( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    ExternalImageFormatProperties & operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExternalImageFormatProperties const *>( &rhs );
      return *this;
    }

    operator VkExternalImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalImageFormatProperties *>( this );
    }

    operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalImageFormatProperties *>( this );
    }

    operator VkExternalImageFormatProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExternalImageFormatProperties *>( this );
    }

    operator VkExternalImageFormatProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExternalImageFormatProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, ExternalMemoryProperties const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, externalMemoryProperties );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalImageFormatProperties const & ) const = default;
#else
    // Member-wise equality; note that pNext is compared by pointer value only,
    // chained structs are not compared deeply.
    bool operator==( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryProperties == rhs.externalMemoryProperties );
#  endif
    }

    bool operator!=( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members must stay layout-compatible with VkExternalImageFormatProperties
    // (relied on by the reinterpret_cast-based conversions above).
    StructureType            sType                    = StructureType::eExternalImageFormatProperties;
    void *                   pNext                    = {};
    ExternalMemoryProperties externalMemoryProperties = {};
  };

  // C++20 only: map the C type to this C++ wrapper via the CppType trait.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkExternalImageFormatProperties>
  {
    using Type = ExternalImageFormatProperties;
  };
#endif

  // Map the StructureType enumerant to the wrapper type via the CppType trait.
  template <>
  struct CppType<StructureType, StructureType::eExternalImageFormatProperties>
  {
    using Type = ExternalImageFormatProperties;
  };

  // Backwards-compatible alias for the extension-suffixed name of this struct.
  using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties;

  // wrapper struct for struct VkImageFormatProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageFormatProperties.html
  struct ImageFormatProperties
  {
    // The corresponding C struct; the reinterpret_casts below assume this wrapper is
    // layout-compatible with it.
    using NativeType = VkImageFormatProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all members value-initialize by default.
    VULKAN_HPP_CONSTEXPR ImageFormatProperties( Extent3D         maxExtent_       = {},
                                                uint32_t         maxMipLevels_    = {},
                                                uint32_t         maxArrayLayers_  = {},
                                                SampleCountFlags sampleCounts_    = {},
                                                DeviceSize       maxResourceSize_ = {} ) VULKAN_HPP_NOEXCEPT
      : maxExtent{ maxExtent_ }
      , maxMipLevels{ maxMipLevels_ }
      , maxArrayLayers{ maxArrayLayers_ }
      , sampleCounts{ sampleCounts_ }
      , maxResourceSize{ maxResourceSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageFormatProperties( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the layout-compatible wrapper.
    ImageFormatProperties( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageFormatProperties( *reinterpret_cast<ImageFormatProperties const *>( &rhs ) )
    {
    }

    ImageFormatProperties & operator=( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; same layout-compatibility assumption as above.
    ImageFormatProperties & operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageFormatProperties const *>( &rhs );
      return *this;
    }

    // Implicit reference/pointer conversions to the C struct, so the wrapper can be
    // handed directly to the C API.
    operator VkImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageFormatProperties *>( this );
    }

    operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageFormatProperties *>( this );
    }

    operator VkImageFormatProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageFormatProperties *>( this );
    }

    operator VkImageFormatProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageFormatProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for reflection-based comparison.
    std::tuple<Extent3D const &, uint32_t const &, uint32_t const &, SampleCountFlags const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( maxExtent, maxMipLevels, maxArrayLayers, sampleCounts, maxResourceSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageFormatProperties const & ) const = default;
#else
    // Member-wise equality; uses reflect() when reflection support is enabled.
    bool operator==( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( maxExtent == rhs.maxExtent ) && ( maxMipLevels == rhs.maxMipLevels ) && ( maxArrayLayers == rhs.maxArrayLayers ) &&
             ( sampleCounts == rhs.sampleCounts ) && ( maxResourceSize == rhs.maxResourceSize );
#  endif
    }

    bool operator!=( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members must stay layout-compatible with VkImageFormatProperties
    // (relied on by the reinterpret_cast-based conversions above).
    Extent3D         maxExtent       = {};
    uint32_t         maxMipLevels    = {};
    uint32_t         maxArrayLayers  = {};
    SampleCountFlags sampleCounts    = {};
    DeviceSize       maxResourceSize = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkImageFormatProperties>
  {
    using Type = ImageFormatProperties;
  };
#endif

  // wrapper struct for struct VkExternalImageFormatPropertiesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalImageFormatPropertiesNV.html
  struct ExternalImageFormatPropertiesNV
  {
    // The corresponding C struct; the reinterpret_casts below assume this wrapper is
    // layout-compatible with it. Note: this NV struct has no sType/pNext members.
    using NativeType = VkExternalImageFormatPropertiesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all members value-initialize by default.
    VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV( ImageFormatProperties           imageFormatProperties_         = {},
                                                          ExternalMemoryFeatureFlagsNV    externalMemoryFeatures_        = {},
                                                          ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {},
                                                          ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_         = {} ) VULKAN_HPP_NOEXCEPT
      : imageFormatProperties{ imageFormatProperties_ }
      , externalMemoryFeatures{ externalMemoryFeatures_ }
      , exportFromImportedHandleTypes{ exportFromImportedHandleTypes_ }
      , compatibleHandleTypes{ compatibleHandleTypes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the layout-compatible wrapper.
    ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalImageFormatPropertiesNV( *reinterpret_cast<ExternalImageFormatPropertiesNV const *>( &rhs ) )
    {
    }

    ExternalImageFormatPropertiesNV & operator=( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; same layout-compatibility assumption as above.
    ExternalImageFormatPropertiesNV & operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExternalImageFormatPropertiesNV const *>( &rhs );
      return *this;
    }

    // Implicit reference/pointer conversions to the C struct, so the wrapper can be
    // handed directly to the C API.
    operator VkExternalImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalImageFormatPropertiesNV *>( this );
    }

    operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalImageFormatPropertiesNV *>( this );
    }

    operator VkExternalImageFormatPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExternalImageFormatPropertiesNV *>( this );
    }

    operator VkExternalImageFormatPropertiesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExternalImageFormatPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for reflection-based comparison.
    std::tuple<ImageFormatProperties const &,
               ExternalMemoryFeatureFlagsNV const &,
               ExternalMemoryHandleTypeFlagsNV const &,
               ExternalMemoryHandleTypeFlagsNV const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( imageFormatProperties, externalMemoryFeatures, exportFromImportedHandleTypes, compatibleHandleTypes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalImageFormatPropertiesNV const & ) const = default;
#else
    // Member-wise equality; uses reflect() when reflection support is enabled.
    bool operator==( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( imageFormatProperties == rhs.imageFormatProperties ) && ( externalMemoryFeatures == rhs.externalMemoryFeatures ) &&
             ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
#  endif
    }

    bool operator!=( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members must stay layout-compatible with VkExternalImageFormatPropertiesNV
    // (relied on by the reinterpret_cast-based conversions above).
    ImageFormatProperties           imageFormatProperties         = {};
    ExternalMemoryFeatureFlagsNV    externalMemoryFeatures        = {};
    ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes = {};
    ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes         = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkExternalImageFormatPropertiesNV>
  {
    using Type = ExternalImageFormatPropertiesNV;
  };
#endif

  // wrapper struct for struct VkExternalMemoryAcquireUnmodifiedEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryAcquireUnmodifiedEXT.html
  struct ExternalMemoryAcquireUnmodifiedEXT
  {
    // The corresponding C struct; the reinterpret_casts below assume this wrapper is
    // layout-compatible with it.
    using NativeType = VkExternalMemoryAcquireUnmodifiedEXT;

    // structureType matches the fixed sType member below; allowDuplicate presumably
    // controls whether this struct may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExternalMemoryAcquireUnmodifiedEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType keeps the value fixed by its in-class initializer.
    VULKAN_HPP_CONSTEXPR ExternalMemoryAcquireUnmodifiedEXT( Bool32 acquireUnmodifiedMemory_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , acquireUnmodifiedMemory{ acquireUnmodifiedMemory_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExternalMemoryAcquireUnmodifiedEXT( ExternalMemoryAcquireUnmodifiedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the layout-compatible wrapper.
    ExternalMemoryAcquireUnmodifiedEXT( VkExternalMemoryAcquireUnmodifiedEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalMemoryAcquireUnmodifiedEXT( *reinterpret_cast<ExternalMemoryAcquireUnmodifiedEXT const *>( &rhs ) )
    {
    }

    ExternalMemoryAcquireUnmodifiedEXT & operator=( ExternalMemoryAcquireUnmodifiedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; same layout-compatibility assumption as above.
    ExternalMemoryAcquireUnmodifiedEXT & operator=( VkExternalMemoryAcquireUnmodifiedEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExternalMemoryAcquireUnmodifiedEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryAcquireUnmodifiedEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryAcquireUnmodifiedEXT & setAcquireUnmodifiedMemory( Bool32 acquireUnmodifiedMemory_ ) VULKAN_HPP_NOEXCEPT
    {
      acquireUnmodifiedMemory = acquireUnmodifiedMemory_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference/pointer conversions to the C struct, so the wrapper can be
    // handed directly to the C API.
    operator VkExternalMemoryAcquireUnmodifiedEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalMemoryAcquireUnmodifiedEXT *>( this );
    }

    operator VkExternalMemoryAcquireUnmodifiedEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalMemoryAcquireUnmodifiedEXT *>( this );
    }

    operator VkExternalMemoryAcquireUnmodifiedEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExternalMemoryAcquireUnmodifiedEXT *>( this );
    }

    operator VkExternalMemoryAcquireUnmodifiedEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExternalMemoryAcquireUnmodifiedEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, acquireUnmodifiedMemory );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalMemoryAcquireUnmodifiedEXT const & ) const = default;
#else
    // Member-wise equality; note that pNext is compared by pointer value only,
    // chained structs are not compared deeply.
    bool operator==( ExternalMemoryAcquireUnmodifiedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( acquireUnmodifiedMemory == rhs.acquireUnmodifiedMemory );
#  endif
    }

    bool operator!=( ExternalMemoryAcquireUnmodifiedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members must stay layout-compatible with VkExternalMemoryAcquireUnmodifiedEXT
    // (relied on by the reinterpret_cast-based conversions above).
    StructureType sType                   = StructureType::eExternalMemoryAcquireUnmodifiedEXT;
    const void *  pNext                   = {};
    Bool32        acquireUnmodifiedMemory = {};
  };

  // C++20 only: map the C type to this C++ wrapper via the CppType trait.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkExternalMemoryAcquireUnmodifiedEXT>
  {
    using Type = ExternalMemoryAcquireUnmodifiedEXT;
  };
#endif

  // Map the StructureType enumerant to the wrapper type via the CppType trait.
  template <>
  struct CppType<StructureType, StructureType::eExternalMemoryAcquireUnmodifiedEXT>
  {
    using Type = ExternalMemoryAcquireUnmodifiedEXT;
  };

  // wrapper struct for struct VkExternalMemoryBufferCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryBufferCreateInfo.html
  struct ExternalMemoryBufferCreateInfo
  {
    // The corresponding C struct; the reinterpret_casts below assume this wrapper is
    // layout-compatible with it.
    using NativeType = VkExternalMemoryBufferCreateInfo;

    // structureType matches the fixed sType member below; allowDuplicate presumably
    // controls whether this struct may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExternalMemoryBufferCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType keeps the value fixed by its in-class initializer.
    VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( ExternalMemoryHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , handleTypes{ handleTypes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the layout-compatible wrapper.
    ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalMemoryBufferCreateInfo( *reinterpret_cast<ExternalMemoryBufferCreateInfo const *>( &rhs ) )
    {
    }

    ExternalMemoryBufferCreateInfo & operator=( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; same layout-compatibility assumption as above.
    ExternalMemoryBufferCreateInfo & operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExternalMemoryBufferCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo & setHandleTypes( ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      handleTypes = handleTypes_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference/pointer conversions to the C struct, so the wrapper can be
    // handed directly to the C API.
    operator VkExternalMemoryBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalMemoryBufferCreateInfo *>( this );
    }

    operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalMemoryBufferCreateInfo *>( this );
    }

    operator VkExternalMemoryBufferCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExternalMemoryBufferCreateInfo *>( this );
    }

    operator VkExternalMemoryBufferCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExternalMemoryBufferCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, ExternalMemoryHandleTypeFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, handleTypes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalMemoryBufferCreateInfo const & ) const = default;
#else
    // Member-wise equality; note that pNext is compared by pointer value only,
    // chained structs are not compared deeply.
    bool operator==( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
#  endif
    }

    bool operator!=( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members must stay layout-compatible with VkExternalMemoryBufferCreateInfo
    // (relied on by the reinterpret_cast-based conversions above).
    StructureType                 sType       = StructureType::eExternalMemoryBufferCreateInfo;
    const void *                  pNext       = {};
    ExternalMemoryHandleTypeFlags handleTypes = {};
  };

  // C++20 only: map the C type to this C++ wrapper via the CppType trait.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkExternalMemoryBufferCreateInfo>
  {
    using Type = ExternalMemoryBufferCreateInfo;
  };
#endif

  // Map the StructureType enumerant to the wrapper type via the CppType trait.
  template <>
  struct CppType<StructureType, StructureType::eExternalMemoryBufferCreateInfo>
  {
    using Type = ExternalMemoryBufferCreateInfo;
  };

  // Backwards-compatible alias for the extension-suffixed name of this struct.
  using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo;

  // wrapper struct for struct VkExternalMemoryImageCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryImageCreateInfo.html
  struct ExternalMemoryImageCreateInfo
  {
    // The corresponding C struct; the reinterpret_casts below assume this wrapper is
    // layout-compatible with it.
    using NativeType = VkExternalMemoryImageCreateInfo;

    // structureType matches the fixed sType member below; allowDuplicate presumably
    // controls whether this struct may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExternalMemoryImageCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType keeps the value fixed by its in-class initializer.
    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( ExternalMemoryHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , handleTypes{ handleTypes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the layout-compatible wrapper.
    ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalMemoryImageCreateInfo( *reinterpret_cast<ExternalMemoryImageCreateInfo const *>( &rhs ) )
    {
    }

    ExternalMemoryImageCreateInfo & operator=( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; same layout-compatibility assumption as above.
    ExternalMemoryImageCreateInfo & operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExternalMemoryImageCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo & setHandleTypes( ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      handleTypes = handleTypes_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference/pointer conversions to the C struct, so the wrapper can be
    // handed directly to the C API.
    operator VkExternalMemoryImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalMemoryImageCreateInfo *>( this );
    }

    operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalMemoryImageCreateInfo *>( this );
    }

    operator VkExternalMemoryImageCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExternalMemoryImageCreateInfo *>( this );
    }

    operator VkExternalMemoryImageCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExternalMemoryImageCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, ExternalMemoryHandleTypeFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, handleTypes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalMemoryImageCreateInfo const & ) const = default;
#else
    // Member-wise equality; note that pNext is compared by pointer value only,
    // chained structs are not compared deeply.
    bool operator==( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
#  endif
    }

    bool operator!=( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members must stay layout-compatible with VkExternalMemoryImageCreateInfo
    // (relied on by the reinterpret_cast-based conversions above).
    StructureType                 sType       = StructureType::eExternalMemoryImageCreateInfo;
    const void *                  pNext       = {};
    ExternalMemoryHandleTypeFlags handleTypes = {};
  };

  // C++20 only: map the C type to this C++ wrapper via the CppType trait.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkExternalMemoryImageCreateInfo>
  {
    using Type = ExternalMemoryImageCreateInfo;
  };
#endif

  // Map the StructureType enumerant to the wrapper type via the CppType trait.
  template <>
  struct CppType<StructureType, StructureType::eExternalMemoryImageCreateInfo>
  {
    using Type = ExternalMemoryImageCreateInfo;
  };

  // Backwards-compatible alias for the extension-suffixed name of this struct.
  using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo;

  // wrapper struct for struct VkExternalMemoryImageCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryImageCreateInfoNV.html
  struct ExternalMemoryImageCreateInfoNV
  {
    // The corresponding C struct; the reinterpret_casts below assume this wrapper is
    // layout-compatible with it.
    using NativeType = VkExternalMemoryImageCreateInfoNV;

    // structureType matches the fixed sType member below; allowDuplicate presumably
    // controls whether this struct may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExternalMemoryImageCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType keeps the value fixed by its in-class initializer.
    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , handleTypes{ handleTypes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the layout-compatible wrapper.
    ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalMemoryImageCreateInfoNV( *reinterpret_cast<ExternalMemoryImageCreateInfoNV const *>( &rhs ) )
    {
    }

    ExternalMemoryImageCreateInfoNV & operator=( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; same layout-compatibility assumption as above.
    ExternalMemoryImageCreateInfoNV & operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExternalMemoryImageCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV & setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      handleTypes = handleTypes_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference/pointer conversions to the C struct, so the wrapper can be
    // handed directly to the C API.
    operator VkExternalMemoryImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalMemoryImageCreateInfoNV *>( this );
    }

    operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalMemoryImageCreateInfoNV *>( this );
    }

    operator VkExternalMemoryImageCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExternalMemoryImageCreateInfoNV *>( this );
    }

    operator VkExternalMemoryImageCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExternalMemoryImageCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, ExternalMemoryHandleTypeFlagsNV const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, handleTypes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalMemoryImageCreateInfoNV const & ) const = default;
#else
    // Member-wise equality; note that pNext is compared by pointer value only,
    // chained structs are not compared deeply.
    bool operator==( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
#  endif
    }

    bool operator!=( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members must stay layout-compatible with VkExternalMemoryImageCreateInfoNV
    // (relied on by the reinterpret_cast-based conversions above).
    StructureType                   sType       = StructureType::eExternalMemoryImageCreateInfoNV;
    const void *                    pNext       = {};
    ExternalMemoryHandleTypeFlagsNV handleTypes = {};
  };

  // C++20 only: map the C type to this C++ wrapper via the CppType trait.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkExternalMemoryImageCreateInfoNV>
  {
    using Type = ExternalMemoryImageCreateInfoNV;
  };
#endif

  // Map the StructureType enumerant to the wrapper type via the CppType trait.
  template <>
  struct CppType<StructureType, StructureType::eExternalMemoryImageCreateInfoNV>
  {
    using Type = ExternalMemoryImageCreateInfoNV;
  };

  // wrapper struct for struct VkExternalMemoryTensorCreateInfoARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryTensorCreateInfoARM.html
  struct ExternalMemoryTensorCreateInfoARM
  {
    // The corresponding C struct; the reinterpret_casts below assume this wrapper is
    // layout-compatible with it.
    using NativeType = VkExternalMemoryTensorCreateInfoARM;

    // structureType matches the fixed sType member below; allowDuplicate presumably
    // controls whether this struct may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExternalMemoryTensorCreateInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType keeps the value fixed by its in-class initializer.
    VULKAN_HPP_CONSTEXPR ExternalMemoryTensorCreateInfoARM( ExternalMemoryHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , handleTypes{ handleTypes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExternalMemoryTensorCreateInfoARM( ExternalMemoryTensorCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the layout-compatible wrapper.
    ExternalMemoryTensorCreateInfoARM( VkExternalMemoryTensorCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalMemoryTensorCreateInfoARM( *reinterpret_cast<ExternalMemoryTensorCreateInfoARM const *>( &rhs ) )
    {
    }

    ExternalMemoryTensorCreateInfoARM & operator=( ExternalMemoryTensorCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; same layout-compatibility assumption as above.
    ExternalMemoryTensorCreateInfoARM & operator=( VkExternalMemoryTensorCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExternalMemoryTensorCreateInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryTensorCreateInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryTensorCreateInfoARM & setHandleTypes( ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      handleTypes = handleTypes_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference/pointer conversions to the C struct, so the wrapper can be
    // handed directly to the C API.
    operator VkExternalMemoryTensorCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalMemoryTensorCreateInfoARM *>( this );
    }

    operator VkExternalMemoryTensorCreateInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalMemoryTensorCreateInfoARM *>( this );
    }

    operator VkExternalMemoryTensorCreateInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExternalMemoryTensorCreateInfoARM *>( this );
    }

    operator VkExternalMemoryTensorCreateInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExternalMemoryTensorCreateInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, ExternalMemoryHandleTypeFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, handleTypes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalMemoryTensorCreateInfoARM const & ) const = default;
#else
    // Member-wise equality; note that pNext is compared by pointer value only,
    // chained structs are not compared deeply.
    bool operator==( ExternalMemoryTensorCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
#  endif
    }

    bool operator!=( ExternalMemoryTensorCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members must stay layout-compatible with VkExternalMemoryTensorCreateInfoARM
    // (relied on by the reinterpret_cast-based conversions above).
    StructureType                 sType       = StructureType::eExternalMemoryTensorCreateInfoARM;
    const void *                  pNext       = {};
    ExternalMemoryHandleTypeFlags handleTypes = {};
  };

  // C++20 only: map the C type to this C++ wrapper via the CppType trait.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkExternalMemoryTensorCreateInfoARM>
  {
    using Type = ExternalMemoryTensorCreateInfoARM;
  };
#endif

  // Map the StructureType enumerant to the wrapper type via the CppType trait.
  template <>
  struct CppType<StructureType, StructureType::eExternalMemoryTensorCreateInfoARM>
  {
    using Type = ExternalMemoryTensorCreateInfoARM;
  };

  // wrapper struct for struct VkExternalSemaphoreProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalSemaphoreProperties.html
  struct ExternalSemaphoreProperties
  {
    using NativeType = VkExternalSemaphoreProperties;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExternalSemaphoreProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {},
                                                      ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_         = {},
                                                      ExternalSemaphoreFeatureFlags    externalSemaphoreFeatures_     = {},
                                                      void *                           pNext_                         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , exportFromImportedHandleTypes{ exportFromImportedHandleTypes_ }
      , compatibleHandleTypes{ compatibleHandleTypes_ }
      , externalSemaphoreFeatures{ externalSemaphoreFeatures_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalSemaphoreProperties( *reinterpret_cast<ExternalSemaphoreProperties const *>( &rhs ) )
    {
    }

    ExternalSemaphoreProperties & operator=( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    ExternalSemaphoreProperties & operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExternalSemaphoreProperties const *>( &rhs );
      return *this;
    }

    operator VkExternalSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalSemaphoreProperties *>( this );
    }

    operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalSemaphoreProperties *>( this );
    }

    operator VkExternalSemaphoreProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExternalSemaphoreProperties *>( this );
    }

    operator VkExternalSemaphoreProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExternalSemaphoreProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &,
               void * const &,
               ExternalSemaphoreHandleTypeFlags const &,
               ExternalSemaphoreHandleTypeFlags const &,
               ExternalSemaphoreFeatureFlags const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, exportFromImportedHandleTypes, compatibleHandleTypes, externalSemaphoreFeatures );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalSemaphoreProperties const & ) const = default;
#else
    bool operator==( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) &&
             ( compatibleHandleTypes == rhs.compatibleHandleTypes ) && ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures );
#  endif
    }

    bool operator!=( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                    sType                         = StructureType::eExternalSemaphoreProperties;
    void *                           pNext                         = {};
    ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {};
    ExternalSemaphoreHandleTypeFlags compatibleHandleTypes         = {};
    ExternalSemaphoreFeatureFlags    externalSemaphoreFeatures     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkExternalSemaphoreProperties>
  {
    using Type = ExternalSemaphoreProperties;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eExternalSemaphoreProperties>
  {
    using Type = ExternalSemaphoreProperties;
  };

  using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties;

  // wrapper struct for struct VkExternalTensorPropertiesARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalTensorPropertiesARM.html
  struct ExternalTensorPropertiesARM
  {
    using NativeType = VkExternalTensorPropertiesARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eExternalTensorPropertiesARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExternalTensorPropertiesARM( ExternalMemoryProperties externalMemoryProperties_ = {},
                                                      const void *             pNext_                    = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , externalMemoryProperties{ externalMemoryProperties_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ExternalTensorPropertiesARM( ExternalTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalTensorPropertiesARM( VkExternalTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalTensorPropertiesARM( *reinterpret_cast<ExternalTensorPropertiesARM const *>( &rhs ) )
    {
    }

    ExternalTensorPropertiesARM & operator=( ExternalTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    ExternalTensorPropertiesARM & operator=( VkExternalTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ExternalTensorPropertiesARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 ExternalTensorPropertiesARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ExternalTensorPropertiesARM &
      setExternalMemoryProperties( ExternalMemoryProperties const & externalMemoryProperties_ ) VULKAN_HPP_NOEXCEPT
    {
      externalMemoryProperties = externalMemoryProperties_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkExternalTensorPropertiesARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalTensorPropertiesARM *>( this );
    }

    operator VkExternalTensorPropertiesARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalTensorPropertiesARM *>( this );
    }

    operator VkExternalTensorPropertiesARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkExternalTensorPropertiesARM *>( this );
    }

    operator VkExternalTensorPropertiesARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkExternalTensorPropertiesARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, ExternalMemoryProperties const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, externalMemoryProperties );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalTensorPropertiesARM const & ) const = default;
#else
    bool operator==( ExternalTensorPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryProperties == rhs.externalMemoryProperties );
#  endif
    }

    bool operator!=( ExternalTensorPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType            sType                    = StructureType::eExternalTensorPropertiesARM;
    const void *             pNext                    = {};
    ExternalMemoryProperties externalMemoryProperties = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkExternalTensorPropertiesARM>
  {
    using Type = ExternalTensorPropertiesARM;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eExternalTensorPropertiesARM>
  {
    using Type = ExternalTensorPropertiesARM;
  };

  // wrapper struct for struct VkFenceCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFenceCreateInfo.html
  struct FenceCreateInfo
  {
    using NativeType = VkFenceCreateInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eFenceCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR FenceCreateInfo( FenceCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR FenceCreateInfo( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : FenceCreateInfo( *reinterpret_cast<FenceCreateInfo const *>( &rhs ) ) {}

    FenceCreateInfo & operator=( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    FenceCreateInfo & operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<FenceCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & setFlags( FenceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFenceCreateInfo *>( this );
    }

    operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFenceCreateInfo *>( this );
    }

    operator VkFenceCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkFenceCreateInfo *>( this );
    }

    operator VkFenceCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkFenceCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, FenceCreateFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FenceCreateInfo const & ) const = default;
#else
    bool operator==( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
#  endif
    }

    bool operator!=( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType    sType = StructureType::eFenceCreateInfo;
    const void *     pNext = {};
    FenceCreateFlags flags = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkFenceCreateInfo>
  {
    using Type = FenceCreateInfo;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eFenceCreateInfo>
  {
    using Type = FenceCreateInfo;
  };

  // wrapper struct for struct VkFenceGetFdInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFenceGetFdInfoKHR.html
  struct FenceGetFdInfoKHR
  {
    using NativeType = VkFenceGetFdInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eFenceGetFdInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( Fence                           fence_      = {},
                                            ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd,
                                            const void *                    pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , fence{ fence_ }
      , handleType{ handleType_ }
    {
    }

    VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : FenceGetFdInfoKHR( *reinterpret_cast<FenceGetFdInfoKHR const *>( &rhs ) ) {}

    FenceGetFdInfoKHR & operator=( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    FenceGetFdInfoKHR & operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<FenceGetFdInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setFence( Fence fence_ ) VULKAN_HPP_NOEXCEPT
    {
      fence = fence_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setHandleType( ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkFenceGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFenceGetFdInfoKHR *>( this );
    }

    operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFenceGetFdInfoKHR *>( this );
    }

    operator VkFenceGetFdInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkFenceGetFdInfoKHR *>( this );
    }

    operator VkFenceGetFdInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkFenceGetFdInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, Fence const &, ExternalFenceHandleTypeFlagBits const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, fence, handleType );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FenceGetFdInfoKHR const & ) const = default;
#else
    bool operator==( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( handleType == rhs.handleType );
#  endif
    }

    bool operator!=( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                   sType      = StructureType::eFenceGetFdInfoKHR;
    const void *                    pNext      = {};
    Fence                           fence      = {};
    ExternalFenceHandleTypeFlagBits handleType = ExternalFenceHandleTypeFlagBits::eOpaqueFd;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkFenceGetFdInfoKHR>
  {
    using Type = FenceGetFdInfoKHR;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eFenceGetFdInfoKHR>
  {
    using Type = FenceGetFdInfoKHR;
  };

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  // wrapper struct for struct VkFenceGetWin32HandleInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFenceGetWin32HandleInfoKHR.html
  struct FenceGetWin32HandleInfoKHR
  {
    using NativeType = VkFenceGetWin32HandleInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eFenceGetWin32HandleInfoKHR;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( Fence                           fence_      = {},
                                                     ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd,
                                                     const void *                    pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , fence{ fence_ }
      , handleType{ handleType_ }
    {
    }

    VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : FenceGetWin32HandleInfoKHR( *reinterpret_cast<FenceGetWin32HandleInfoKHR const *>( &rhs ) )
    {
    }

    FenceGetWin32HandleInfoKHR & operator=( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    FenceGetWin32HandleInfoKHR & operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<FenceGetWin32HandleInfoKHR const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setFence( Fence fence_ ) VULKAN_HPP_NOEXCEPT
    {
      fence = fence_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setHandleType( ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkFenceGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( this );
    }

    operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFenceGetWin32HandleInfoKHR *>( this );
    }

    operator VkFenceGetWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( this );
    }

    operator VkFenceGetWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkFenceGetWin32HandleInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, Fence const &, ExternalFenceHandleTypeFlagBits const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, fence, handleType );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FenceGetWin32HandleInfoKHR const & ) const = default;
#  else
    bool operator==( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( handleType == rhs.handleType );
#    endif
    }

    bool operator!=( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    StructureType                   sType      = StructureType::eFenceGetWin32HandleInfoKHR;
    const void *                    pNext      = {};
    Fence                           fence      = {};
    ExternalFenceHandleTypeFlagBits handleType = ExternalFenceHandleTypeFlagBits::eOpaqueFd;
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkFenceGetWin32HandleInfoKHR>
  {
    using Type = FenceGetWin32HandleInfoKHR;
  };
#  endif

  template <>
  struct CppType<StructureType, StructureType::eFenceGetWin32HandleInfoKHR>
  {
    using Type = FenceGetWin32HandleInfoKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

  // wrapper struct for struct VkFilterCubicImageViewImageFormatPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkFilterCubicImageViewImageFormatPropertiesEXT.html
  struct FilterCubicImageViewImageFormatPropertiesEXT
  {
    using NativeType = VkFilterCubicImageViewImageFormatPropertiesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      FilterCubicImageViewImageFormatPropertiesEXT( Bool32 filterCubic_ = {}, Bool32 filterCubicMinmax_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , filterCubic{ filterCubic_ }
      , filterCubicMinmax{ filterCubicMinmax_ }
    {
    }

    VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : FilterCubicImageViewImageFormatPropertiesEXT( *reinterpret_cast<FilterCubicImageViewImageFormatPropertiesEXT const *>( &rhs ) )
    {
    }

    FilterCubicImageViewImageFormatPropertiesEXT & operator=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    FilterCubicImageViewImageFormatPropertiesEXT & operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<FilterCubicImageViewImageFormatPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkFilterCubicImageViewImageFormatPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFilterCubicImageViewImageFormatPropertiesEXT *>( this );
    }

    operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFilterCubicImageViewImageFormatPropertiesEXT *>( this );
    }

    operator VkFilterCubicImageViewImageFormatPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkFilterCubicImageViewImageFormatPropertiesEXT *>( this );
    }

    operator VkFilterCubicImageViewImageFormatPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkFilterCubicImageViewImageFormatPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, filterCubic, filterCubicMinmax );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FilterCubicImageViewImageFormatPropertiesEXT const & ) const = default;
#else
    bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterCubic == rhs.filterCubic ) && ( filterCubicMinmax == rhs.filterCubicMinmax );
#  endif
    }

    bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType             = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
    void *        pNext             = {};
    Bool32        filterCubic       = {};
    Bool32        filterCubicMinmax = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkFilterCubicImageViewImageFormatPropertiesEXT>
  {
    using Type = FilterCubicImageViewImageFormatPropertiesEXT;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eFilterCubicImageViewImageFormatPropertiesEXT>
  {
    using Type = FilterCubicImageViewImageFormatPropertiesEXT;
  };

  // wrapper struct for struct VkFormatProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFormatProperties.html
  struct FormatProperties
  {
    using NativeType = VkFormatProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR FormatProperties( FormatFeatureFlags linearTilingFeatures_  = {},
                                           FormatFeatureFlags optimalTilingFeatures_ = {},
                                           FormatFeatureFlags bufferFeatures_        = {} ) VULKAN_HPP_NOEXCEPT
      : linearTilingFeatures{ linearTilingFeatures_ }
      , optimalTilingFeatures{ optimalTilingFeatures_ }
      , bufferFeatures{ bufferFeatures_ }
    {
    }

    VULKAN_HPP_CONSTEXPR FormatProperties( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT : FormatProperties( *reinterpret_cast<FormatProperties const *>( &rhs ) ) {}

    FormatProperties & operator=( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    FormatProperties & operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<FormatProperties const *>( &rhs );
      return *this;
    }

    operator VkFormatProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFormatProperties *>( this );
    }

    operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFormatProperties *>( this );
    }

    operator VkFormatProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkFormatProperties *>( this );
    }

    operator VkFormatProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkFormatProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<FormatFeatureFlags const &, FormatFeatureFlags const &, FormatFeatureFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( linearTilingFeatures, optimalTilingFeatures, bufferFeatures );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FormatProperties const & ) const = default;
#else
    bool operator==( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( linearTilingFeatures == rhs.linearTilingFeatures ) && ( optimalTilingFeatures == rhs.optimalTilingFeatures ) &&
             ( bufferFeatures == rhs.bufferFeatures );
#  endif
    }

    bool operator!=( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    FormatFeatureFlags linearTilingFeatures  = {};
    FormatFeatureFlags optimalTilingFeatures = {};
    FormatFeatureFlags bufferFeatures        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkFormatProperties>
  {
    using Type = FormatProperties;
  };
#endif

  // wrapper struct for struct VkFormatProperties2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFormatProperties2.html
  struct FormatProperties2
  {
    using NativeType = VkFormatProperties2;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eFormatProperties2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR FormatProperties2( FormatProperties formatProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , formatProperties{ formatProperties_ }
    {
    }

    VULKAN_HPP_CONSTEXPR FormatProperties2( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT : FormatProperties2( *reinterpret_cast<FormatProperties2 const *>( &rhs ) ) {}

    FormatProperties2 & operator=( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    FormatProperties2 & operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<FormatProperties2 const *>( &rhs );
      return *this;
    }

    operator VkFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFormatProperties2 *>( this );
    }

    operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFormatProperties2 *>( this );
    }

    operator VkFormatProperties2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkFormatProperties2 *>( this );
    }

    operator VkFormatProperties2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkFormatProperties2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, FormatProperties const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, formatProperties );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FormatProperties2 const & ) const = default;
#else
    bool operator==( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatProperties == rhs.formatProperties );
#  endif
    }

    bool operator!=( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType    sType            = StructureType::eFormatProperties2;
    void *           pNext            = {};
    FormatProperties formatProperties = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkFormatProperties2>
  {
    using Type = FormatProperties2;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eFormatProperties2>
  {
    using Type = FormatProperties2;
  };

  using FormatProperties2KHR = FormatProperties2;

  // wrapper struct for struct VkFormatProperties3, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFormatProperties3.html
  struct FormatProperties3
  {
    using NativeType = VkFormatProperties3;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eFormatProperties3;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; all feature flags default to empty, pNext to nullptr.
    VULKAN_HPP_CONSTEXPR FormatProperties3( FormatFeatureFlags2 linearTilingFeatures_  = {},
                                            FormatFeatureFlags2 optimalTilingFeatures_ = {},
                                            FormatFeatureFlags2 bufferFeatures_        = {},
                                            void *              pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , linearTilingFeatures{ linearTilingFeatures_ }
      , optimalTilingFeatures{ optimalTilingFeatures_ }
      , bufferFeatures{ bufferFeatures_ }
    {
    }

    VULKAN_HPP_CONSTEXPR FormatProperties3( FormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the cast is valid because the wrapper mirrors the C layout member-for-member.
    FormatProperties3( VkFormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT : FormatProperties3( *reinterpret_cast<FormatProperties3 const *>( &rhs ) ) {}

    FormatProperties3 & operator=( FormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility note as the converting constructor above).
    FormatProperties3 & operator=( VkFormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<FormatProperties3 const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkFormatProperties3 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFormatProperties3 *>( this );
    }

    operator VkFormatProperties3 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFormatProperties3 *>( this );
    }

    operator VkFormatProperties3 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkFormatProperties3 *>( this );
    }

    operator VkFormatProperties3 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkFormatProperties3 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: a tuple of references to all members, used by the generic comparison path below.
    std::tuple<StructureType const &, void * const &, FormatFeatureFlags2 const &, FormatFeatureFlags2 const &, FormatFeatureFlags2 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, linearTilingFeatures, optimalTilingFeatures, bufferFeatures );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FormatProperties3 const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a pointer, the chained structs are not inspected.
    bool operator==( FormatProperties3 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( linearTilingFeatures == rhs.linearTilingFeatures ) &&
             ( optimalTilingFeatures == rhs.optimalTilingFeatures ) && ( bufferFeatures == rhs.bufferFeatures );
#  endif
    }

    bool operator!=( FormatProperties3 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order must match VkFormatProperties3 exactly; the conversions above rely on it.
    StructureType       sType                 = StructureType::eFormatProperties3;
    void *              pNext                 = {};
    FormatFeatureFlags2 linearTilingFeatures  = {};
    FormatFeatureFlags2 optimalTilingFeatures = {};
    FormatFeatureFlags2 bufferFeatures        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for template-based lookups.
  template <>
  struct CppType<VkFormatProperties3>
  {
    using Type = FormatProperties3;
  };
#endif

  // Maps the sType enumerant to its C++ struct type.
  template <>
  struct CppType<StructureType, StructureType::eFormatProperties3>
  {
    using Type = FormatProperties3;
  };

  // Backward-compatible alias: the KHR extension struct was promoted to core.
  using FormatProperties3KHR = FormatProperties3;

  // wrapper struct for struct VkFragmentShadingRateAttachmentInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkFragmentShadingRateAttachmentInfoKHR.html
  struct FragmentShadingRateAttachmentInfoKHR
  {
    using NativeType = VkFragmentShadingRateAttachmentInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eFragmentShadingRateAttachmentInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( const AttachmentReference2 * pFragmentShadingRateAttachment_ = {},
                                                               Extent2D                     shadingRateAttachmentTexelSize_ = {},
                                                               const void *                 pNext_                          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pFragmentShadingRateAttachment{ pFragmentShadingRateAttachment_ }
      , shadingRateAttachmentTexelSize{ shadingRateAttachmentTexelSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FragmentShadingRateAttachmentInfoKHR( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : FragmentShadingRateAttachmentInfoKHR( *reinterpret_cast<FragmentShadingRateAttachmentInfoKHR const *>( &rhs ) )
    {
    }

    FragmentShadingRateAttachmentInfoKHR & operator=( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    FragmentShadingRateAttachmentInfoKHR & operator=( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<FragmentShadingRateAttachmentInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR &
      setPFragmentShadingRateAttachment( const AttachmentReference2 * pFragmentShadingRateAttachment_ ) VULKAN_HPP_NOEXCEPT
    {
      pFragmentShadingRateAttachment = pFragmentShadingRateAttachment_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR &
      setShadingRateAttachmentTexelSize( Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT
    {
      shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFragmentShadingRateAttachmentInfoKHR *>( this );
    }

    operator VkFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFragmentShadingRateAttachmentInfoKHR *>( this );
    }

    operator VkFragmentShadingRateAttachmentInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkFragmentShadingRateAttachmentInfoKHR *>( this );
    }

    operator VkFragmentShadingRateAttachmentInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkFragmentShadingRateAttachmentInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, const AttachmentReference2 * const &, Extent2D const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pFragmentShadingRateAttachment, shadingRateAttachmentTexelSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FragmentShadingRateAttachmentInfoKHR const & ) const = default;
#else
    bool operator==( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pFragmentShadingRateAttachment == rhs.pFragmentShadingRateAttachment ) &&
             ( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize );
#  endif
    }

    bool operator!=( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                sType                          = StructureType::eFragmentShadingRateAttachmentInfoKHR;
    const void *                 pNext                          = {};
    const AttachmentReference2 * pFragmentShadingRateAttachment = {};
    Extent2D                     shadingRateAttachmentTexelSize = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for template-based lookups.
  template <>
  struct CppType<VkFragmentShadingRateAttachmentInfoKHR>
  {
    using Type = FragmentShadingRateAttachmentInfoKHR;
  };
#endif

  // Maps the sType enumerant to its C++ struct type.
  template <>
  struct CppType<StructureType, StructureType::eFragmentShadingRateAttachmentInfoKHR>
  {
    using Type = FragmentShadingRateAttachmentInfoKHR;
  };

  // wrapper struct for struct VkFrameBoundaryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFrameBoundaryEXT.html
  struct FrameBoundaryEXT
  {
    using NativeType = VkFrameBoundaryEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eFrameBoundaryEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; all fields default to empty/zero, pNext to nullptr.
    VULKAN_HPP_CONSTEXPR FrameBoundaryEXT( FrameBoundaryFlagsEXT flags_       = {},
                                           uint64_t              frameID_     = {},
                                           uint32_t              imageCount_  = {},
                                           const Image *         pImages_     = {},
                                           uint32_t              bufferCount_ = {},
                                           const Buffer *        pBuffers_    = {},
                                           uint64_t              tagName_     = {},
                                           size_t                tagSize_     = {},
                                           const void *          pTag_        = {},
                                           const void *          pNext_       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , frameID{ frameID_ }
      , imageCount{ imageCount_ }
      , pImages{ pImages_ }
      , bufferCount{ bufferCount_ }
      , pBuffers{ pBuffers_ }
      , tagName{ tagName_ }
      , tagSize{ tagSize_ }
      , pTag{ pTag_ }
    {
    }

    VULKAN_HPP_CONSTEXPR FrameBoundaryEXT( FrameBoundaryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the cast is valid because the wrapper mirrors the C layout member-for-member.
    FrameBoundaryEXT( VkFrameBoundaryEXT const & rhs ) VULKAN_HPP_NOEXCEPT : FrameBoundaryEXT( *reinterpret_cast<FrameBoundaryEXT const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Array-proxy constructor: counts and pointers are derived from the proxies; tagSize is computed in bytes
    // from tag_.size() * sizeof( T ). Note: T is deduced from tag_, so when tag_ is left at its default, T must
    // be specified explicitly (default arguments do not participate in template argument deduction).
    template <typename T>
    FrameBoundaryEXT( FrameBoundaryFlagsEXT                         flags_,
                      uint64_t                                      frameID_,
                      ArrayProxyNoTemporaries<const Image> const &  images_,
                      ArrayProxyNoTemporaries<const Buffer> const & buffers_ = {},
                      uint64_t                                      tagName_ = {},
                      ArrayProxyNoTemporaries<const T> const &      tag_     = {},
                      const void *                                  pNext_   = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , frameID( frameID_ )
      , imageCount( static_cast<uint32_t>( images_.size() ) )
      , pImages( images_.data() )
      , bufferCount( static_cast<uint32_t>( buffers_.size() ) )
      , pBuffers( buffers_.data() )
      , tagName( tagName_ )
      , tagSize( tag_.size() * sizeof( T ) )
      , pTag( tag_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    FrameBoundaryEXT & operator=( FrameBoundaryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    FrameBoundaryEXT & operator=( VkFrameBoundaryEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<FrameBoundaryEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this so calls can be fluently combined.
    VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setFlags( FrameBoundaryFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setFrameID( uint64_t frameID_ ) VULKAN_HPP_NOEXCEPT
    {
      frameID = frameID_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setImageCount( uint32_t imageCount_ ) VULKAN_HPP_NOEXCEPT
    {
      imageCount = imageCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setPImages( const Image * pImages_ ) VULKAN_HPP_NOEXCEPT
    {
      pImages = pImages_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets imageCount and pImages together from one array proxy.
    FrameBoundaryEXT & setImages( ArrayProxyNoTemporaries<const Image> const & images_ ) VULKAN_HPP_NOEXCEPT
    {
      imageCount = static_cast<uint32_t>( images_.size() );
      pImages    = images_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setBufferCount( uint32_t bufferCount_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferCount = bufferCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setPBuffers( const Buffer * pBuffers_ ) VULKAN_HPP_NOEXCEPT
    {
      pBuffers = pBuffers_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets bufferCount and pBuffers together from one array proxy.
    FrameBoundaryEXT & setBuffers( ArrayProxyNoTemporaries<const Buffer> const & buffers_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferCount = static_cast<uint32_t>( buffers_.size() );
      pBuffers    = buffers_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
    {
      tagName = tagName_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
    {
      tagSize = tagSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT
    {
      pTag = pTag_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets tagSize (in bytes) and pTag together from one typed array proxy.
    template <typename T>
    FrameBoundaryEXT & setTag( ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
    {
      tagSize = tag_.size() * sizeof( T );
      pTag    = tag_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkFrameBoundaryEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFrameBoundaryEXT *>( this );
    }

    operator VkFrameBoundaryEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFrameBoundaryEXT *>( this );
    }

    operator VkFrameBoundaryEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkFrameBoundaryEXT *>( this );
    }

    operator VkFrameBoundaryEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkFrameBoundaryEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: a tuple of references to all members, used by the generic comparison path below.
    std::tuple<StructureType const &,
               const void * const &,
               FrameBoundaryFlagsEXT const &,
               uint64_t const &,
               uint32_t const &,
               const Image * const &,
               uint32_t const &,
               const Buffer * const &,
               uint64_t const &,
               size_t const &,
               const void * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, frameID, imageCount, pImages, bufferCount, pBuffers, tagName, tagSize, pTag );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FrameBoundaryEXT const & ) const = default;
#else
    // Member-wise equality; pointer members (pImages, pBuffers, pTag, pNext) are compared by address only.
    bool operator==( FrameBoundaryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( frameID == rhs.frameID ) && ( imageCount == rhs.imageCount ) &&
             ( pImages == rhs.pImages ) && ( bufferCount == rhs.bufferCount ) && ( pBuffers == rhs.pBuffers ) && ( tagName == rhs.tagName ) &&
             ( tagSize == rhs.tagSize ) && ( pTag == rhs.pTag );
#  endif
    }

    bool operator!=( FrameBoundaryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order must match VkFrameBoundaryEXT exactly; the conversions above rely on it.
    StructureType         sType       = StructureType::eFrameBoundaryEXT;
    const void *          pNext       = {};
    FrameBoundaryFlagsEXT flags       = {};
    uint64_t              frameID     = {};
    uint32_t              imageCount  = {};
    const Image *         pImages     = {};
    uint32_t              bufferCount = {};
    const Buffer *        pBuffers    = {};
    uint64_t              tagName     = {};
    size_t                tagSize     = {};
    const void *          pTag        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for template-based lookups.
  template <>
  struct CppType<VkFrameBoundaryEXT>
  {
    using Type = FrameBoundaryEXT;
  };
#endif

  // Maps the sType enumerant to its C++ struct type.
  template <>
  struct CppType<StructureType, StructureType::eFrameBoundaryEXT>
  {
    using Type = FrameBoundaryEXT;
  };

  // wrapper struct for struct VkFrameBoundaryTensorsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFrameBoundaryTensorsARM.html
  struct FrameBoundaryTensorsARM
  {
    using NativeType = VkFrameBoundaryTensorsARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eFrameBoundaryTensorsARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      FrameBoundaryTensorsARM( uint32_t tensorCount_ = {}, const TensorARM * pTensors_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , tensorCount{ tensorCount_ }
      , pTensors{ pTensors_ }
    {
    }

    VULKAN_HPP_CONSTEXPR FrameBoundaryTensorsARM( FrameBoundaryTensorsARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FrameBoundaryTensorsARM( VkFrameBoundaryTensorsARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : FrameBoundaryTensorsARM( *reinterpret_cast<FrameBoundaryTensorsARM const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    FrameBoundaryTensorsARM( ArrayProxyNoTemporaries<const TensorARM> const & tensors_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), tensorCount( static_cast<uint32_t>( tensors_.size() ) ), pTensors( tensors_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    FrameBoundaryTensorsARM & operator=( FrameBoundaryTensorsARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    FrameBoundaryTensorsARM & operator=( VkFrameBoundaryTensorsARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<FrameBoundaryTensorsARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 FrameBoundaryTensorsARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FrameBoundaryTensorsARM & setTensorCount( uint32_t tensorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      tensorCount = tensorCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FrameBoundaryTensorsARM & setPTensors( const TensorARM * pTensors_ ) VULKAN_HPP_NOEXCEPT
    {
      pTensors = pTensors_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    FrameBoundaryTensorsARM & setTensors( ArrayProxyNoTemporaries<const TensorARM> const & tensors_ ) VULKAN_HPP_NOEXCEPT
    {
      tensorCount = static_cast<uint32_t>( tensors_.size() );
      pTensors    = tensors_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    operator VkFrameBoundaryTensorsARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFrameBoundaryTensorsARM *>( this );
    }

    operator VkFrameBoundaryTensorsARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFrameBoundaryTensorsARM *>( this );
    }

    operator VkFrameBoundaryTensorsARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkFrameBoundaryTensorsARM *>( this );
    }

    operator VkFrameBoundaryTensorsARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkFrameBoundaryTensorsARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const TensorARM * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, tensorCount, pTensors );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FrameBoundaryTensorsARM const & ) const = default;
#else
    bool operator==( FrameBoundaryTensorsARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tensorCount == rhs.tensorCount ) && ( pTensors == rhs.pTensors );
#  endif
    }

    bool operator!=( FrameBoundaryTensorsARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType     sType       = StructureType::eFrameBoundaryTensorsARM;
    const void *      pNext       = {};
    uint32_t          tensorCount = {};
    const TensorARM * pTensors    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for template-based lookups.
  template <>
  struct CppType<VkFrameBoundaryTensorsARM>
  {
    using Type = FrameBoundaryTensorsARM;
  };
#endif

  // Maps the sType enumerant to its C++ struct type.
  template <>
  struct CppType<StructureType, StructureType::eFrameBoundaryTensorsARM>
  {
    using Type = FrameBoundaryTensorsARM;
  };

  // wrapper struct for struct VkFramebufferAttachmentImageInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkFramebufferAttachmentImageInfo.html
  struct FramebufferAttachmentImageInfo
  {
    using NativeType = VkFramebufferAttachmentImageInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eFramebufferAttachmentImageInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; all fields default to empty/zero, pNext to nullptr.
    VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( ImageCreateFlags flags_           = {},
                                                         ImageUsageFlags  usage_           = {},
                                                         uint32_t         width_           = {},
                                                         uint32_t         height_          = {},
                                                         uint32_t         layerCount_      = {},
                                                         uint32_t         viewFormatCount_ = {},
                                                         const Format *   pViewFormats_    = {},
                                                         const void *     pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , usage{ usage_ }
      , width{ width_ }
      , height{ height_ }
      , layerCount{ layerCount_ }
      , viewFormatCount{ viewFormatCount_ }
      , pViewFormats{ pViewFormats_ }
    {
    }

    VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the cast is valid because the wrapper mirrors the C layout member-for-member.
    FramebufferAttachmentImageInfo( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : FramebufferAttachmentImageInfo( *reinterpret_cast<FramebufferAttachmentImageInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Array-proxy constructor: viewFormatCount and pViewFormats are derived from the proxy.
    FramebufferAttachmentImageInfo( ImageCreateFlags                              flags_,
                                    ImageUsageFlags                               usage_,
                                    uint32_t                                      width_,
                                    uint32_t                                      height_,
                                    uint32_t                                      layerCount_,
                                    ArrayProxyNoTemporaries<const Format> const & viewFormats_,
                                    const void *                                  pNext_ = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , usage( usage_ )
      , width( width_ )
      , height( height_ )
      , layerCount( layerCount_ )
      , viewFormatCount( static_cast<uint32_t>( viewFormats_.size() ) )
      , pViewFormats( viewFormats_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    FramebufferAttachmentImageInfo & operator=( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    FramebufferAttachmentImageInfo & operator=( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<FramebufferAttachmentImageInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this so calls can be fluently combined.
    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setFlags( ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setUsage( ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
    {
      width = width_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
    {
      height = height_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
    {
      layerCount = layerCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
    {
      viewFormatCount = viewFormatCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setPViewFormats( const Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT
    {
      pViewFormats = pViewFormats_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets viewFormatCount and pViewFormats together from one array proxy.
    FramebufferAttachmentImageInfo & setViewFormats( ArrayProxyNoTemporaries<const Format> const & viewFormats_ ) VULKAN_HPP_NOEXCEPT
    {
      viewFormatCount = static_cast<uint32_t>( viewFormats_.size() );
      pViewFormats    = viewFormats_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkFramebufferAttachmentImageInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFramebufferAttachmentImageInfo *>( this );
    }

    operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFramebufferAttachmentImageInfo *>( this );
    }

    operator VkFramebufferAttachmentImageInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkFramebufferAttachmentImageInfo *>( this );
    }

    operator VkFramebufferAttachmentImageInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkFramebufferAttachmentImageInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: a tuple of references to all members, used by the generic comparison path below.
    std::tuple<StructureType const &,
               const void * const &,
               ImageCreateFlags const &,
               ImageUsageFlags const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               const Format * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, usage, width, height, layerCount, viewFormatCount, pViewFormats );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FramebufferAttachmentImageInfo const & ) const = default;
#else
    // Member-wise equality; pViewFormats and pNext are compared by address only.
    bool operator==( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) && ( width == rhs.width ) &&
             ( height == rhs.height ) && ( layerCount == rhs.layerCount ) && ( viewFormatCount == rhs.viewFormatCount ) && ( pViewFormats == rhs.pViewFormats );
#  endif
    }

    bool operator!=( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order must match VkFramebufferAttachmentImageInfo exactly; the conversions above rely on it.
    StructureType    sType           = StructureType::eFramebufferAttachmentImageInfo;
    const void *     pNext           = {};
    ImageCreateFlags flags           = {};
    ImageUsageFlags  usage           = {};
    uint32_t         width           = {};
    uint32_t         height          = {};
    uint32_t         layerCount      = {};
    uint32_t         viewFormatCount = {};
    const Format *   pViewFormats    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for template-based lookups.
  template <>
  struct CppType<VkFramebufferAttachmentImageInfo>
  {
    using Type = FramebufferAttachmentImageInfo;
  };
#endif

  // Maps the sType enumerant to its C++ struct type.
  template <>
  struct CppType<StructureType, StructureType::eFramebufferAttachmentImageInfo>
  {
    using Type = FramebufferAttachmentImageInfo;
  };

  // Backward-compatible alias: the KHR extension struct was promoted to core.
  using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo;

  // wrapper struct for struct VkFramebufferAttachmentsCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkFramebufferAttachmentsCreateInfo.html
  struct FramebufferAttachmentsCreateInfo
  {
    using NativeType = VkFramebufferAttachmentsCreateInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eFramebufferAttachmentsCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( uint32_t                               attachmentImageInfoCount_ = {},
                                                           const FramebufferAttachmentImageInfo * pAttachmentImageInfos_    = {},
                                                           const void *                           pNext_                    = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , attachmentImageInfoCount{ attachmentImageInfoCount_ }
      , pAttachmentImageInfos{ pAttachmentImageInfos_ }
    {
    }

    VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FramebufferAttachmentsCreateInfo( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : FramebufferAttachmentsCreateInfo( *reinterpret_cast<FramebufferAttachmentsCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    FramebufferAttachmentsCreateInfo( ArrayProxyNoTemporaries<const FramebufferAttachmentImageInfo> const & attachmentImageInfos_,
                                      const void *                                                          pNext_ = nullptr )
      : pNext( pNext_ )
      , attachmentImageInfoCount( static_cast<uint32_t>( attachmentImageInfos_.size() ) )
      , pAttachmentImageInfos( attachmentImageInfos_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    FramebufferAttachmentsCreateInfo & operator=( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    FramebufferAttachmentsCreateInfo & operator=( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<FramebufferAttachmentsCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentImageInfoCount = attachmentImageInfoCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo &
      setPAttachmentImageInfos( const FramebufferAttachmentImageInfo * pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      pAttachmentImageInfos = pAttachmentImageInfos_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    FramebufferAttachmentsCreateInfo &
      setAttachmentImageInfos( ArrayProxyNoTemporaries<const FramebufferAttachmentImageInfo> const & attachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentImageInfoCount = static_cast<uint32_t>( attachmentImageInfos_.size() );
      pAttachmentImageInfos    = attachmentImageInfos_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C API type, by reference and by pointer, in
    // const and non-const flavors. These rely on the wrapper sharing the exact
    // memory layout of VkFramebufferAttachmentsCreateInfo.
    operator VkFramebufferAttachmentsCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFramebufferAttachmentsCreateInfo *>( this );
    }

    operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFramebufferAttachmentsCreateInfo *>( this );
    }

    operator VkFramebufferAttachmentsCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkFramebufferAttachmentsCreateInfo *>( this );
    }

    operator VkFramebufferAttachmentsCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkFramebufferAttachmentsCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, used by the reflection
    // based operator== below.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const FramebufferAttachmentImageInfo * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, attachmentImageInfoCount, pAttachmentImageInfos );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Member-wise comparison via defaulted three-way operator where available.
    auto operator<=>( FramebufferAttachmentsCreateInfo const & ) const = default;
#else
    // Member-wise equality; pointer members compare by address, not by pointee contents.
    bool operator==( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentImageInfoCount == rhs.attachmentImageInfoCount ) &&
             ( pAttachmentImageInfos == rhs.pAttachmentImageInfos );
#  endif
    }

    bool operator!=( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Mirrors VkFramebufferAttachmentsCreateInfo member-for-member; member order
    // must not change, since the conversion operators reinterpret_cast between the two.
    StructureType                          sType                    = StructureType::eFramebufferAttachmentsCreateInfo;
    const void *                           pNext                    = {};
    uint32_t                               attachmentImageInfoCount = {};
    const FramebufferAttachmentImageInfo * pAttachmentImageInfos    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to its C++ wrapper for template meta-programming.
  template <>
  struct CppType<VkFramebufferAttachmentsCreateInfo>
  {
    using Type = FramebufferAttachmentsCreateInfo;
  };
#endif

  // Maps the StructureType enum value back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eFramebufferAttachmentsCreateInfo>
  {
    using Type = FramebufferAttachmentsCreateInfo;
  };

  // The struct was promoted from VK_KHR_imageless_framebuffer to core; keep the KHR name as an alias.
  using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo;

  // wrapper struct for struct VkFramebufferCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFramebufferCreateInfo.html
  struct FramebufferCreateInfo
  {
    using NativeType = VkFramebufferCreateInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eFramebufferCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer below.
    VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( FramebufferCreateFlags flags_           = {},
                                                RenderPass             renderPass_      = {},
                                                uint32_t               attachmentCount_ = {},
                                                const ImageView *      pAttachments_    = {},
                                                uint32_t               width_           = {},
                                                uint32_t               height_          = {},
                                                uint32_t               layers_          = {},
                                                const void *           pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , renderPass{ renderPass_ }
      , attachmentCount{ attachmentCount_ }
      , pAttachments{ pAttachments_ }
      , width{ width_ }
      , height{ height_ }
      , layers{ layers_ }
    {
    }

    VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on identical memory layout (see reinterpret_cast).
    FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : FramebufferCreateInfo( *reinterpret_cast<FramebufferCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: attachmentCount / pAttachments are derived from the
    // array proxy, which must outlive any use of this struct.
    FramebufferCreateInfo( FramebufferCreateFlags                           flags_,
                           RenderPass                                       renderPass_,
                           ArrayProxyNoTemporaries<const ImageView> const & attachments_,
                           uint32_t                                         width_  = {},
                           uint32_t                                         height_ = {},
                           uint32_t                                         layers_ = {},
                           const void *                                     pNext_  = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , renderPass( renderPass_ )
      , attachmentCount( static_cast<uint32_t>( attachments_.size() ) )
      , pAttachments( attachments_.data() )
      , width( width_ )
      , height( height_ )
      , layers( layers_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    FramebufferCreateInfo & operator=( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; relies on identical memory layout.
    FramebufferCreateInfo & operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<FramebufferCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setFlags( FramebufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setRenderPass( RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
    {
      renderPass = renderPass_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = attachmentCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setPAttachments( const ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pAttachments = pAttachments_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets attachmentCount and pAttachments consistently from
    // the array proxy, which must outlive this struct.
    FramebufferCreateInfo & setAttachments( ArrayProxyNoTemporaries<const ImageView> const & attachments_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = static_cast<uint32_t>( attachments_.size() );
      pAttachments    = attachments_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
    {
      width = width_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
    {
      height = height_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setLayers( uint32_t layers_ ) VULKAN_HPP_NOEXCEPT
    {
      layers = layers_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C API type (reference and pointer, const and
    // non-const); valid because the wrapper mirrors VkFramebufferCreateInfo's layout.
    operator VkFramebufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFramebufferCreateInfo *>( this );
    }

    operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFramebufferCreateInfo *>( this );
    }

    operator VkFramebufferCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkFramebufferCreateInfo *>( this );
    }

    operator VkFramebufferCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkFramebufferCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, used by the reflection-based operator==.
    std::tuple<StructureType const &,
               const void * const &,
               FramebufferCreateFlags const &,
               RenderPass const &,
               uint32_t const &,
               const ImageView * const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, renderPass, attachmentCount, pAttachments, width, height, layers );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FramebufferCreateInfo const & ) const = default;
#else
    // Member-wise equality; pointer members compare by address, not pointee contents.
    bool operator==( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( renderPass == rhs.renderPass ) &&
             ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) && ( width == rhs.width ) && ( height == rhs.height ) &&
             ( layers == rhs.layers );
#  endif
    }

    bool operator!=( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Mirrors VkFramebufferCreateInfo member-for-member; order must not change
    // because the conversion operators reinterpret_cast between the two types.
    StructureType          sType           = StructureType::eFramebufferCreateInfo;
    const void *           pNext           = {};
    FramebufferCreateFlags flags           = {};
    RenderPass             renderPass      = {};
    uint32_t               attachmentCount = {};
    const ImageView *      pAttachments    = {};
    uint32_t               width           = {};
    uint32_t               height          = {};
    uint32_t               layers          = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to its C++ wrapper for template meta-programming.
  template <>
  struct CppType<VkFramebufferCreateInfo>
  {
    using Type = FramebufferCreateInfo;
  };
#endif

  // Maps the StructureType enum value back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eFramebufferCreateInfo>
  {
    using Type = FramebufferCreateInfo;
  };

  // wrapper struct for struct VkFramebufferMixedSamplesCombinationNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkFramebufferMixedSamplesCombinationNV.html
  struct FramebufferMixedSamplesCombinationNV
  {
    using NativeType = VkFramebufferMixedSamplesCombinationNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eFramebufferMixedSamplesCombinationNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; note pNext is non-const here (this struct is filled
    // in by the implementation), and no setters are generated for it.
    VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( CoverageReductionModeNV coverageReductionMode_ = CoverageReductionModeNV::eMerge,
                                                               SampleCountFlagBits     rasterizationSamples_  = SampleCountFlagBits::e1,
                                                               SampleCountFlags        depthStencilSamples_   = {},
                                                               SampleCountFlags        colorSamples_          = {},
                                                               void *                  pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , coverageReductionMode{ coverageReductionMode_ }
      , rasterizationSamples{ rasterizationSamples_ }
      , depthStencilSamples{ depthStencilSamples_ }
      , colorSamples{ colorSamples_ }
    {
    }

    VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on identical memory layout (see reinterpret_cast).
    FramebufferMixedSamplesCombinationNV( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : FramebufferMixedSamplesCombinationNV( *reinterpret_cast<FramebufferMixedSamplesCombinationNV const *>( &rhs ) )
    {
    }

    FramebufferMixedSamplesCombinationNV & operator=( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; relies on identical memory layout.
    FramebufferMixedSamplesCombinationNV & operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<FramebufferMixedSamplesCombinationNV const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C API type (reference and pointer, const and non-const).
    operator VkFramebufferMixedSamplesCombinationNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFramebufferMixedSamplesCombinationNV *>( this );
    }

    operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( this );
    }

    operator VkFramebufferMixedSamplesCombinationNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkFramebufferMixedSamplesCombinationNV *>( this );
    }

    operator VkFramebufferMixedSamplesCombinationNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, used by the reflection-based operator==.
    std::tuple<StructureType const &,
               void * const &,
               CoverageReductionModeNV const &,
               SampleCountFlagBits const &,
               SampleCountFlags const &,
               SampleCountFlags const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, coverageReductionMode, rasterizationSamples, depthStencilSamples, colorSamples );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FramebufferMixedSamplesCombinationNV const & ) const = default;
#else
    // Member-wise equality; pNext compares by address.
    bool operator==( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( coverageReductionMode == rhs.coverageReductionMode ) &&
             ( rasterizationSamples == rhs.rasterizationSamples ) && ( depthStencilSamples == rhs.depthStencilSamples ) && ( colorSamples == rhs.colorSamples );
#  endif
    }

    bool operator!=( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Mirrors VkFramebufferMixedSamplesCombinationNV member-for-member; order must
    // not change because the conversion operators reinterpret_cast between the two.
    StructureType           sType                 = StructureType::eFramebufferMixedSamplesCombinationNV;
    void *                  pNext                 = {};
    CoverageReductionModeNV coverageReductionMode = CoverageReductionModeNV::eMerge;
    SampleCountFlagBits     rasterizationSamples  = SampleCountFlagBits::e1;
    SampleCountFlags        depthStencilSamples   = {};
    SampleCountFlags        colorSamples          = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to its C++ wrapper for template meta-programming.
  template <>
  struct CppType<VkFramebufferMixedSamplesCombinationNV>
  {
    using Type = FramebufferMixedSamplesCombinationNV;
  };
#endif

  // Maps the StructureType enum value back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eFramebufferMixedSamplesCombinationNV>
  {
    using Type = FramebufferMixedSamplesCombinationNV;
  };

  // wrapper struct for struct VkGeneratedCommandsInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeneratedCommandsInfoEXT.html
  struct GeneratedCommandsInfoEXT
  {
    using NativeType = VkGeneratedCommandsInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eGeneratedCommandsInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer below.
    // All buffer data is referenced via raw device addresses, so no enhanced-mode
    // (ArrayProxy) overload is generated for this struct.
    VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoEXT( ShaderStageFlags          shaderStages_           = {},
                                                   IndirectExecutionSetEXT   indirectExecutionSet_   = {},
                                                   IndirectCommandsLayoutEXT indirectCommandsLayout_ = {},
                                                   DeviceAddress             indirectAddress_        = {},
                                                   DeviceSize                indirectAddressSize_    = {},
                                                   DeviceAddress             preprocessAddress_      = {},
                                                   DeviceSize                preprocessSize_         = {},
                                                   uint32_t                  maxSequenceCount_       = {},
                                                   DeviceAddress             sequenceCountAddress_   = {},
                                                   uint32_t                  maxDrawCount_           = {},
                                                   const void *              pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderStages{ shaderStages_ }
      , indirectExecutionSet{ indirectExecutionSet_ }
      , indirectCommandsLayout{ indirectCommandsLayout_ }
      , indirectAddress{ indirectAddress_ }
      , indirectAddressSize{ indirectAddressSize_ }
      , preprocessAddress{ preprocessAddress_ }
      , preprocessSize{ preprocessSize_ }
      , maxSequenceCount{ maxSequenceCount_ }
      , sequenceCountAddress{ sequenceCountAddress_ }
      , maxDrawCount{ maxDrawCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoEXT( GeneratedCommandsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on identical memory layout (see reinterpret_cast).
    GeneratedCommandsInfoEXT( VkGeneratedCommandsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : GeneratedCommandsInfoEXT( *reinterpret_cast<GeneratedCommandsInfoEXT const *>( &rhs ) )
    {
    }

    GeneratedCommandsInfoEXT & operator=( GeneratedCommandsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; relies on identical memory layout.
    GeneratedCommandsInfoEXT & operator=( VkGeneratedCommandsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<GeneratedCommandsInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setShaderStages( ShaderStageFlags shaderStages_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStages = shaderStages_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setIndirectExecutionSet( IndirectExecutionSetEXT indirectExecutionSet_ ) VULKAN_HPP_NOEXCEPT
    {
      indirectExecutionSet = indirectExecutionSet_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setIndirectCommandsLayout( IndirectCommandsLayoutEXT indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      indirectCommandsLayout = indirectCommandsLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setIndirectAddress( DeviceAddress indirectAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      indirectAddress = indirectAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setIndirectAddressSize( DeviceSize indirectAddressSize_ ) VULKAN_HPP_NOEXCEPT
    {
      indirectAddressSize = indirectAddressSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setPreprocessAddress( DeviceAddress preprocessAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      preprocessAddress = preprocessAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setPreprocessSize( DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT
    {
      preprocessSize = preprocessSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setMaxSequenceCount( uint32_t maxSequenceCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxSequenceCount = maxSequenceCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setSequenceCountAddress( DeviceAddress sequenceCountAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      sequenceCountAddress = sequenceCountAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setMaxDrawCount( uint32_t maxDrawCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxDrawCount = maxDrawCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C API type (reference and pointer, const and non-const).
    operator VkGeneratedCommandsInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGeneratedCommandsInfoEXT *>( this );
    }

    operator VkGeneratedCommandsInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGeneratedCommandsInfoEXT *>( this );
    }

    operator VkGeneratedCommandsInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkGeneratedCommandsInfoEXT *>( this );
    }

    operator VkGeneratedCommandsInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkGeneratedCommandsInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, used by the reflection-based operator==.
    std::tuple<StructureType const &,
               const void * const &,
               ShaderStageFlags const &,
               IndirectExecutionSetEXT const &,
               IndirectCommandsLayoutEXT const &,
               DeviceAddress const &,
               DeviceSize const &,
               DeviceAddress const &,
               DeviceSize const &,
               uint32_t const &,
               DeviceAddress const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       shaderStages,
                       indirectExecutionSet,
                       indirectCommandsLayout,
                       indirectAddress,
                       indirectAddressSize,
                       preprocessAddress,
                       preprocessSize,
                       maxSequenceCount,
                       sequenceCountAddress,
                       maxDrawCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( GeneratedCommandsInfoEXT const & ) const = default;
#else
    // Member-wise equality; handle and address members compare by value.
    bool operator==( GeneratedCommandsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderStages == rhs.shaderStages ) && ( indirectExecutionSet == rhs.indirectExecutionSet ) &&
             ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && ( indirectAddress == rhs.indirectAddress ) &&
             ( indirectAddressSize == rhs.indirectAddressSize ) && ( preprocessAddress == rhs.preprocessAddress ) && ( preprocessSize == rhs.preprocessSize ) &&
             ( maxSequenceCount == rhs.maxSequenceCount ) && ( sequenceCountAddress == rhs.sequenceCountAddress ) && ( maxDrawCount == rhs.maxDrawCount );
#  endif
    }

    bool operator!=( GeneratedCommandsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Mirrors VkGeneratedCommandsInfoEXT member-for-member; order must not change
    // because the conversion operators reinterpret_cast between the two types.
    StructureType             sType                  = StructureType::eGeneratedCommandsInfoEXT;
    const void *              pNext                  = {};
    ShaderStageFlags          shaderStages           = {};
    IndirectExecutionSetEXT   indirectExecutionSet   = {};
    IndirectCommandsLayoutEXT indirectCommandsLayout = {};
    DeviceAddress             indirectAddress        = {};
    DeviceSize                indirectAddressSize    = {};
    DeviceAddress             preprocessAddress      = {};
    DeviceSize                preprocessSize         = {};
    uint32_t                  maxSequenceCount       = {};
    DeviceAddress             sequenceCountAddress   = {};
    uint32_t                  maxDrawCount           = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to its C++ wrapper for template meta-programming.
  template <>
  struct CppType<VkGeneratedCommandsInfoEXT>
  {
    using Type = GeneratedCommandsInfoEXT;
  };
#endif

  // Maps the StructureType enum value back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eGeneratedCommandsInfoEXT>
  {
    using Type = GeneratedCommandsInfoEXT;
  };

  // wrapper struct for struct VkIndirectCommandsStreamNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsStreamNV.html
  struct IndirectCommandsStreamNV
  {
    using NativeType = VkIndirectCommandsStreamNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor. This struct has no sType/pNext — it is a plain
    // (buffer, offset) pair, not an extensible Vulkan structure.
    VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( Buffer buffer_ = {}, DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT
      : buffer{ buffer_ }
      , offset{ offset_ }
    {
    }

    VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on identical memory layout (see reinterpret_cast).
    IndirectCommandsStreamNV( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : IndirectCommandsStreamNV( *reinterpret_cast<IndirectCommandsStreamNV const *>( &rhs ) )
    {
    }

    IndirectCommandsStreamNV & operator=( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; relies on identical memory layout.
    IndirectCommandsStreamNV & operator=( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<IndirectCommandsStreamNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & setBuffer( Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & setOffset( DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C API type (reference and pointer, const and non-const).
    operator VkIndirectCommandsStreamNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkIndirectCommandsStreamNV *>( this );
    }

    operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkIndirectCommandsStreamNV *>( this );
    }

    operator VkIndirectCommandsStreamNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkIndirectCommandsStreamNV *>( this );
    }

    operator VkIndirectCommandsStreamNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkIndirectCommandsStreamNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, used by the reflection-based operator==.
    std::tuple<Buffer const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( buffer, offset );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( IndirectCommandsStreamNV const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( buffer == rhs.buffer ) && ( offset == rhs.offset );
#  endif
    }

    bool operator!=( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Mirrors VkIndirectCommandsStreamNV member-for-member; order must not change
    // because the conversion operators reinterpret_cast between the two types.
    Buffer     buffer = {};
    DeviceSize offset = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to its C++ wrapper for template meta-programming.
  // No StructureType specialization exists: this struct has no sType member.
  template <>
  struct CppType<VkIndirectCommandsStreamNV>
  {
    using Type = IndirectCommandsStreamNV;
  };
#endif

  // wrapper struct for struct VkGeneratedCommandsInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeneratedCommandsInfoNV.html
  struct GeneratedCommandsInfoNV
  {
    using NativeType = VkGeneratedCommandsInfoNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eGeneratedCommandsInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( PipelineBindPoint                pipelineBindPoint_      = PipelineBindPoint::eGraphics,
                                                  Pipeline                         pipeline_               = {},
                                                  IndirectCommandsLayoutNV         indirectCommandsLayout_ = {},
                                                  uint32_t                         streamCount_            = {},
                                                  const IndirectCommandsStreamNV * pStreams_               = {},
                                                  uint32_t                         sequencesCount_         = {},
                                                  Buffer                           preprocessBuffer_       = {},
                                                  DeviceSize                       preprocessOffset_       = {},
                                                  DeviceSize                       preprocessSize_         = {},
                                                  Buffer                           sequencesCountBuffer_   = {},
                                                  DeviceSize                       sequencesCountOffset_   = {},
                                                  Buffer                           sequencesIndexBuffer_   = {},
                                                  DeviceSize                       sequencesIndexOffset_   = {},
                                                  const void *                     pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pipelineBindPoint{ pipelineBindPoint_ }
      , pipeline{ pipeline_ }
      , indirectCommandsLayout{ indirectCommandsLayout_ }
      , streamCount{ streamCount_ }
      , pStreams{ pStreams_ }
      , sequencesCount{ sequencesCount_ }
      , preprocessBuffer{ preprocessBuffer_ }
      , preprocessOffset{ preprocessOffset_ }
      , preprocessSize{ preprocessSize_ }
      , sequencesCountBuffer{ sequencesCountBuffer_ }
      , sequencesCountOffset{ sequencesCountOffset_ }
      , sequencesIndexBuffer{ sequencesIndexBuffer_ }
      , sequencesIndexOffset{ sequencesIndexOffset_ }
    {
    }

    VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    GeneratedCommandsInfoNV( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : GeneratedCommandsInfoNV( *reinterpret_cast<GeneratedCommandsInfoNV const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    GeneratedCommandsInfoNV( PipelineBindPoint                                               pipelineBindPoint_,
                             Pipeline                                                        pipeline_,
                             IndirectCommandsLayoutNV                                        indirectCommandsLayout_,
                             ArrayProxyNoTemporaries<const IndirectCommandsStreamNV> const & streams_,
                             uint32_t                                                        sequencesCount_       = {},
                             Buffer                                                          preprocessBuffer_     = {},
                             DeviceSize                                                      preprocessOffset_     = {},
                             DeviceSize                                                      preprocessSize_       = {},
                             Buffer                                                          sequencesCountBuffer_ = {},
                             DeviceSize                                                      sequencesCountOffset_ = {},
                             Buffer                                                          sequencesIndexBuffer_ = {},
                             DeviceSize                                                      sequencesIndexOffset_ = {},
                             const void *                                                    pNext_                = nullptr )
      : pNext( pNext_ )
      , pipelineBindPoint( pipelineBindPoint_ )
      , pipeline( pipeline_ )
      , indirectCommandsLayout( indirectCommandsLayout_ )
      , streamCount( static_cast<uint32_t>( streams_.size() ) )
      , pStreams( streams_.data() )
      , sequencesCount( sequencesCount_ )
      , preprocessBuffer( preprocessBuffer_ )
      , preprocessOffset( preprocessOffset_ )
      , preprocessSize( preprocessSize_ )
      , sequencesCountBuffer( sequencesCountBuffer_ )
      , sequencesCountOffset( sequencesCountOffset_ )
      , sequencesIndexBuffer( sequencesIndexBuffer_ )
      , sequencesIndexOffset( sequencesIndexOffset_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    GeneratedCommandsInfoNV & operator=( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; the wrapper is designed to be layout-compatible with the C type, so the cast is safe.
    GeneratedCommandsInfoNV & operator=( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<GeneratedCommandsInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns a single member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineBindPoint = pipelineBindPoint_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPipeline( Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
    {
      pipeline = pipeline_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setIndirectCommandsLayout( IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      indirectCommandsLayout = indirectCommandsLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT
    {
      streamCount = streamCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPStreams( const IndirectCommandsStreamNV * pStreams_ ) VULKAN_HPP_NOEXCEPT
    {
      pStreams = pStreams_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience setter: fills both streamCount and pStreams from an array proxy in one call.
    GeneratedCommandsInfoNV & setStreams( ArrayProxyNoTemporaries<const IndirectCommandsStreamNV> const & streams_ ) VULKAN_HPP_NOEXCEPT
    {
      streamCount = static_cast<uint32_t>( streams_.size() );
      pStreams    = streams_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCount( uint32_t sequencesCount_ ) VULKAN_HPP_NOEXCEPT
    {
      sequencesCount = sequencesCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessBuffer( Buffer preprocessBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      preprocessBuffer = preprocessBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessOffset( DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      preprocessOffset = preprocessOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessSize( DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT
    {
      preprocessSize = preprocessSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCountBuffer( Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      sequencesCountBuffer = sequencesCountBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCountOffset( DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      sequencesCountOffset = sequencesCountOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesIndexBuffer( Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      sequencesIndexBuffer = sequencesIndexBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesIndexOffset( DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      sequencesIndexOffset = sequencesIndexOffset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type, by reference and by pointer, for passing the wrapper straight to the C API.
    operator VkGeneratedCommandsInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGeneratedCommandsInfoNV *>( this );
    }

    operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGeneratedCommandsInfoNV *>( this );
    }

    operator VkGeneratedCommandsInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkGeneratedCommandsInfoNV *>( this );
    }

    operator VkGeneratedCommandsInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkGeneratedCommandsInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references over all members (sType first), used for reflection-based comparison and hashing.
    std::tuple<StructureType const &,
               const void * const &,
               PipelineBindPoint const &,
               Pipeline const &,
               IndirectCommandsLayoutNV const &,
               uint32_t const &,
               const IndirectCommandsStreamNV * const &,
               uint32_t const &,
               Buffer const &,
               DeviceSize const &,
               DeviceSize const &,
               Buffer const &,
               DeviceSize const &,
               Buffer const &,
               DeviceSize const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       pipelineBindPoint,
                       pipeline,
                       indirectCommandsLayout,
                       streamCount,
                       pStreams,
                       sequencesCount,
                       preprocessBuffer,
                       preprocessOffset,
                       preprocessSize,
                       sequencesCountBuffer,
                       sequencesCountOffset,
                       sequencesIndexBuffer,
                       sequencesIndexOffset );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison yields all relational operators when <=> is available.
    auto operator<=>( GeneratedCommandsInfoNV const & ) const = default;
#else
    // Memberwise equality; pointer members (pNext, pStreams) compare addresses, not pointees.
    bool operator==( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipeline == rhs.pipeline ) &&
             ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && ( streamCount == rhs.streamCount ) && ( pStreams == rhs.pStreams ) &&
             ( sequencesCount == rhs.sequencesCount ) && ( preprocessBuffer == rhs.preprocessBuffer ) && ( preprocessOffset == rhs.preprocessOffset ) &&
             ( preprocessSize == rhs.preprocessSize ) && ( sequencesCountBuffer == rhs.sequencesCountBuffer ) &&
             ( sequencesCountOffset == rhs.sequencesCountOffset ) && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer ) &&
             ( sequencesIndexOffset == rhs.sequencesIndexOffset );
#  endif
    }

    bool operator!=( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkGeneratedCommandsInfoNV in order and type so the reinterpret_cast conversions stay valid.
    StructureType                    sType                  = StructureType::eGeneratedCommandsInfoNV;
    const void *                     pNext                  = {};
    PipelineBindPoint                pipelineBindPoint      = PipelineBindPoint::eGraphics;
    Pipeline                         pipeline               = {};
    IndirectCommandsLayoutNV         indirectCommandsLayout = {};
    uint32_t                         streamCount            = {};
    const IndirectCommandsStreamNV * pStreams               = {};
    uint32_t                         sequencesCount         = {};
    Buffer                           preprocessBuffer       = {};
    DeviceSize                       preprocessOffset       = {};
    DeviceSize                       preprocessSize         = {};
    Buffer                           sequencesCountBuffer   = {};
    DeviceSize                       sequencesCountOffset   = {};
    Buffer                           sequencesIndexBuffer   = {};
    DeviceSize                       sequencesIndexOffset   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (used for CppType-based lookups).
  template <>
  struct CppType<VkGeneratedCommandsInfoNV>
  {
    using Type = GeneratedCommandsInfoNV;
  };
#endif

  // Maps StructureType::eGeneratedCommandsInfoNV to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eGeneratedCommandsInfoNV>
  {
    using Type = GeneratedCommandsInfoNV;
  };

  // wrapper struct for struct VkGeneratedCommandsMemoryRequirementsInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeneratedCommandsMemoryRequirementsInfoEXT.html
  struct GeneratedCommandsMemoryRequirementsInfoEXT
  {
    using NativeType = VkGeneratedCommandsMemoryRequirementsInfoEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsMemoryRequirementsInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its in-class initializer.
    VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoEXT( IndirectExecutionSetEXT   indirectExecutionSet_   = {},
                                                                     IndirectCommandsLayoutEXT indirectCommandsLayout_ = {},
                                                                     uint32_t                  maxSequenceCount_       = {},
                                                                     uint32_t                  maxDrawCount_           = {},
                                                                     const void *              pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , indirectExecutionSet( indirectExecutionSet_ )
      , indirectCommandsLayout( indirectCommandsLayout_ )
      , maxSequenceCount( maxSequenceCount_ )
      , maxDrawCount( maxDrawCount_ )
    {
    }

    VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoEXT( GeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; the wrapper mirrors the C layout, so reinterpreting is safe.
    GeneratedCommandsMemoryRequirementsInfoEXT( VkGeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : GeneratedCommandsMemoryRequirementsInfoEXT( *reinterpret_cast<GeneratedCommandsMemoryRequirementsInfoEXT const *>( &rhs ) )
    {
    }

    GeneratedCommandsMemoryRequirementsInfoEXT & operator=( GeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct.
    GeneratedCommandsMemoryRequirementsInfoEXT & operator=( VkGeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<GeneratedCommandsMemoryRequirementsInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT &
      setIndirectExecutionSet( IndirectExecutionSetEXT indirectExecutionSet_ ) VULKAN_HPP_NOEXCEPT
    {
      this->indirectExecutionSet = indirectExecutionSet_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT &
      setIndirectCommandsLayout( IndirectCommandsLayoutEXT indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      this->indirectCommandsLayout = indirectCommandsLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & setMaxSequenceCount( uint32_t maxSequenceCount_ ) VULKAN_HPP_NOEXCEPT
    {
      this->maxSequenceCount = maxSequenceCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & setMaxDrawCount( uint32_t maxDrawCount_ ) VULKAN_HPP_NOEXCEPT
    {
      this->maxDrawCount = maxDrawCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type, by reference and by pointer.
    operator VkGeneratedCommandsMemoryRequirementsInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoEXT *>( this );
    }

    operator VkGeneratedCommandsMemoryRequirementsInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGeneratedCommandsMemoryRequirementsInfoEXT *>( this );
    }

    operator VkGeneratedCommandsMemoryRequirementsInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoEXT *>( this );
    }

    operator VkGeneratedCommandsMemoryRequirementsInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkGeneratedCommandsMemoryRequirementsInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, for generic comparison / hashing.
    std::
      tuple<StructureType const &, const void * const &, IndirectExecutionSetEXT const &, IndirectCommandsLayoutEXT const &, uint32_t const &, uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, indirectExecutionSet, indirectCommandsLayout, maxSequenceCount, maxDrawCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( GeneratedCommandsMemoryRequirementsInfoEXT const & ) const = default;
#else
    // Memberwise equality; handle and pointer members compare by value/address.
    bool operator==( GeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return sType == rhs.sType && pNext == rhs.pNext && indirectExecutionSet == rhs.indirectExecutionSet &&
             indirectCommandsLayout == rhs.indirectCommandsLayout && maxSequenceCount == rhs.maxSequenceCount && maxDrawCount == rhs.maxDrawCount;
#  endif
    }

    bool operator!=( GeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    // Member order and types mirror VkGeneratedCommandsMemoryRequirementsInfoEXT for layout compatibility.
    StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoEXT;
    const void * pNext = {};
    IndirectExecutionSetEXT indirectExecutionSet = {};
    IndirectCommandsLayoutEXT indirectCommandsLayout = {};
    uint32_t maxSequenceCount = {};
    uint32_t maxDrawCount = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (used for CppType-based lookups).
  template <>
  struct CppType<VkGeneratedCommandsMemoryRequirementsInfoEXT>
  {
    using Type = GeneratedCommandsMemoryRequirementsInfoEXT;
  };
#endif

  // Maps StructureType::eGeneratedCommandsMemoryRequirementsInfoEXT to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eGeneratedCommandsMemoryRequirementsInfoEXT>
  {
    using Type = GeneratedCommandsMemoryRequirementsInfoEXT;
  };

  // wrapper struct for struct VkGeneratedCommandsMemoryRequirementsInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeneratedCommandsMemoryRequirementsInfoNV.html
  struct GeneratedCommandsMemoryRequirementsInfoNV
  {
    using NativeType = VkGeneratedCommandsMemoryRequirementsInfoNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its in-class initializer.
    VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( PipelineBindPoint        pipelineBindPoint_      = PipelineBindPoint::eGraphics,
                                                                    Pipeline                 pipeline_               = {},
                                                                    IndirectCommandsLayoutNV indirectCommandsLayout_ = {},
                                                                    uint32_t                 maxSequencesCount_      = {},
                                                                    const void *             pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , pipelineBindPoint( pipelineBindPoint_ )
      , pipeline( pipeline_ )
      , indirectCommandsLayout( indirectCommandsLayout_ )
      , maxSequencesCount( maxSequencesCount_ )
    {
    }

    VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; the wrapper mirrors the C layout, so reinterpreting is safe.
    GeneratedCommandsMemoryRequirementsInfoNV( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : GeneratedCommandsMemoryRequirementsInfoNV( *reinterpret_cast<GeneratedCommandsMemoryRequirementsInfoNV const *>( &rhs ) )
    {
    }

    GeneratedCommandsMemoryRequirementsInfoNV & operator=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct.
    GeneratedCommandsMemoryRequirementsInfoNV & operator=( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<GeneratedCommandsMemoryRequirementsInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pipelineBindPoint = pipelineBindPoint_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPipeline( Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pipeline = pipeline_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV &
      setIndirectCommandsLayout( IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      this->indirectCommandsLayout = indirectCommandsLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT
    {
      this->maxSequencesCount = maxSequencesCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type, by reference and by pointer.
    operator VkGeneratedCommandsMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( this );
    }

    operator VkGeneratedCommandsMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGeneratedCommandsMemoryRequirementsInfoNV *>( this );
    }

    operator VkGeneratedCommandsMemoryRequirementsInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( this );
    }

    operator VkGeneratedCommandsMemoryRequirementsInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkGeneratedCommandsMemoryRequirementsInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, for generic comparison / hashing.
    std::tuple<StructureType const &, const void * const &, PipelineBindPoint const &, Pipeline const &, IndirectCommandsLayoutNV const &, uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pipelineBindPoint, pipeline, indirectCommandsLayout, maxSequencesCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( GeneratedCommandsMemoryRequirementsInfoNV const & ) const = default;
#else
    // Memberwise equality; handle and pointer members compare by value/address.
    bool operator==( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return sType == rhs.sType && pNext == rhs.pNext && pipelineBindPoint == rhs.pipelineBindPoint && pipeline == rhs.pipeline &&
             indirectCommandsLayout == rhs.indirectCommandsLayout && maxSequencesCount == rhs.maxSequencesCount;
#  endif
    }

    bool operator!=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    // Member order and types mirror VkGeneratedCommandsMemoryRequirementsInfoNV for layout compatibility.
    StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV;
    const void * pNext = {};
    PipelineBindPoint pipelineBindPoint = PipelineBindPoint::eGraphics;
    Pipeline pipeline = {};
    IndirectCommandsLayoutNV indirectCommandsLayout = {};
    uint32_t maxSequencesCount = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (used for CppType-based lookups).
  template <>
  struct CppType<VkGeneratedCommandsMemoryRequirementsInfoNV>
  {
    using Type = GeneratedCommandsMemoryRequirementsInfoNV;
  };
#endif

  // Maps StructureType::eGeneratedCommandsMemoryRequirementsInfoNV to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eGeneratedCommandsMemoryRequirementsInfoNV>
  {
    using Type = GeneratedCommandsMemoryRequirementsInfoNV;
  };

  // wrapper struct for struct VkGeneratedCommandsPipelineInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeneratedCommandsPipelineInfoEXT.html
  struct GeneratedCommandsPipelineInfoEXT
  {
    using NativeType = VkGeneratedCommandsPipelineInfoEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsPipelineInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its in-class initializer.
    VULKAN_HPP_CONSTEXPR GeneratedCommandsPipelineInfoEXT( Pipeline pipeline_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , pipeline( pipeline_ )
    {
    }

    VULKAN_HPP_CONSTEXPR GeneratedCommandsPipelineInfoEXT( GeneratedCommandsPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; the wrapper mirrors the C layout, so reinterpreting is safe.
    GeneratedCommandsPipelineInfoEXT( VkGeneratedCommandsPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : GeneratedCommandsPipelineInfoEXT( *reinterpret_cast<GeneratedCommandsPipelineInfoEXT const *>( &rhs ) )
    {
    }

    GeneratedCommandsPipelineInfoEXT & operator=( GeneratedCommandsPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct.
    GeneratedCommandsPipelineInfoEXT & operator=( VkGeneratedCommandsPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<GeneratedCommandsPipelineInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsPipelineInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsPipelineInfoEXT & setPipeline( Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pipeline = pipeline_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type, by reference and by pointer.
    operator VkGeneratedCommandsPipelineInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGeneratedCommandsPipelineInfoEXT *>( this );
    }

    operator VkGeneratedCommandsPipelineInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGeneratedCommandsPipelineInfoEXT *>( this );
    }

    operator VkGeneratedCommandsPipelineInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkGeneratedCommandsPipelineInfoEXT *>( this );
    }

    operator VkGeneratedCommandsPipelineInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkGeneratedCommandsPipelineInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, for generic comparison / hashing.
    std::tuple<StructureType const &, void * const &, Pipeline const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pipeline );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( GeneratedCommandsPipelineInfoEXT const & ) const = default;
#else
    // Memberwise equality; the pipeline handle compares by value.
    bool operator==( GeneratedCommandsPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return sType == rhs.sType && pNext == rhs.pNext && pipeline == rhs.pipeline;
#  endif
    }

    bool operator!=( GeneratedCommandsPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    // Member order and types mirror VkGeneratedCommandsPipelineInfoEXT for layout compatibility.
    StructureType sType = StructureType::eGeneratedCommandsPipelineInfoEXT;
    void * pNext = {};
    Pipeline pipeline = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (used for CppType-based lookups).
  template <>
  struct CppType<VkGeneratedCommandsPipelineInfoEXT>
  {
    using Type = GeneratedCommandsPipelineInfoEXT;
  };
#endif

  // Maps StructureType::eGeneratedCommandsPipelineInfoEXT to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eGeneratedCommandsPipelineInfoEXT>
  {
    using Type = GeneratedCommandsPipelineInfoEXT;
  };

  // wrapper struct for struct VkGeneratedCommandsShaderInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeneratedCommandsShaderInfoEXT.html
  struct GeneratedCommandsShaderInfoEXT
  {
    using NativeType = VkGeneratedCommandsShaderInfoEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsShaderInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its in-class initializer.
    VULKAN_HPP_CONSTEXPR
      GeneratedCommandsShaderInfoEXT( uint32_t shaderCount_ = {}, const ShaderEXT * pShaders_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , shaderCount( shaderCount_ )
      , pShaders( pShaders_ )
    {
    }

    VULKAN_HPP_CONSTEXPR GeneratedCommandsShaderInfoEXT( GeneratedCommandsShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; the wrapper mirrors the C layout, so reinterpreting is safe.
    GeneratedCommandsShaderInfoEXT( VkGeneratedCommandsShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : GeneratedCommandsShaderInfoEXT( *reinterpret_cast<GeneratedCommandsShaderInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives shaderCount and pShaders from an array proxy.
    GeneratedCommandsShaderInfoEXT( ArrayProxyNoTemporaries<const ShaderEXT> const & shaders_, void * pNext_ = nullptr )
      : pNext( pNext_ )
      , shaderCount( static_cast<uint32_t>( shaders_.size() ) )
      , pShaders( shaders_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    GeneratedCommandsShaderInfoEXT & operator=( GeneratedCommandsShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct.
    GeneratedCommandsShaderInfoEXT & operator=( VkGeneratedCommandsShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<GeneratedCommandsShaderInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsShaderInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsShaderInfoEXT & setShaderCount( uint32_t shaderCount_ ) VULKAN_HPP_NOEXCEPT
    {
      this->shaderCount = shaderCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsShaderInfoEXT & setPShaders( const ShaderEXT * pShaders_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pShaders = pShaders_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets shaderCount and pShaders together from an array proxy.
    GeneratedCommandsShaderInfoEXT & setShaders( ArrayProxyNoTemporaries<const ShaderEXT> const & shaders_ ) VULKAN_HPP_NOEXCEPT
    {
      this->shaderCount = static_cast<uint32_t>( shaders_.size() );
      this->pShaders    = shaders_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type, by reference and by pointer.
    operator VkGeneratedCommandsShaderInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGeneratedCommandsShaderInfoEXT *>( this );
    }

    operator VkGeneratedCommandsShaderInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGeneratedCommandsShaderInfoEXT *>( this );
    }

    operator VkGeneratedCommandsShaderInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkGeneratedCommandsShaderInfoEXT *>( this );
    }

    operator VkGeneratedCommandsShaderInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkGeneratedCommandsShaderInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, for generic comparison / hashing.
    std::tuple<StructureType const &, void * const &, uint32_t const &, const ShaderEXT * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderCount, pShaders );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( GeneratedCommandsShaderInfoEXT const & ) const = default;
#else
    // Memberwise equality; pShaders compares the pointer, not the pointed-to array.
    bool operator==( GeneratedCommandsShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return sType == rhs.sType && pNext == rhs.pNext && shaderCount == rhs.shaderCount && pShaders == rhs.pShaders;
#  endif
    }

    bool operator!=( GeneratedCommandsShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    // Member order and types mirror VkGeneratedCommandsShaderInfoEXT for layout compatibility.
    StructureType sType = StructureType::eGeneratedCommandsShaderInfoEXT;
    void * pNext = {};
    uint32_t shaderCount = {};
    const ShaderEXT * pShaders = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (used for CppType-based lookups).
  template <>
  struct CppType<VkGeneratedCommandsShaderInfoEXT>
  {
    using Type = GeneratedCommandsShaderInfoEXT;
  };
#endif

  // Maps StructureType::eGeneratedCommandsShaderInfoEXT to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eGeneratedCommandsShaderInfoEXT>
  {
    using Type = GeneratedCommandsShaderInfoEXT;
  };

  // wrapper struct for struct VkLatencyTimingsFrameReportNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLatencyTimingsFrameReportNV.html
  struct LatencyTimingsFrameReportNV
  {
    using NativeType = VkLatencyTimingsFrameReportNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eLatencyTimingsFrameReportNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR LatencyTimingsFrameReportNV( uint64_t presentID_                = {},
                                                      uint64_t inputSampleTimeUs_        = {},
                                                      uint64_t simStartTimeUs_           = {},
                                                      uint64_t simEndTimeUs_             = {},
                                                      uint64_t renderSubmitStartTimeUs_  = {},
                                                      uint64_t renderSubmitEndTimeUs_    = {},
                                                      uint64_t presentStartTimeUs_       = {},
                                                      uint64_t presentEndTimeUs_         = {},
                                                      uint64_t driverStartTimeUs_        = {},
                                                      uint64_t driverEndTimeUs_          = {},
                                                      uint64_t osRenderQueueStartTimeUs_ = {},
                                                      uint64_t osRenderQueueEndTimeUs_   = {},
                                                      uint64_t gpuRenderStartTimeUs_     = {},
                                                      uint64_t gpuRenderEndTimeUs_       = {},
                                                      void *   pNext_                    = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , presentID{ presentID_ }
      , inputSampleTimeUs{ inputSampleTimeUs_ }
      , simStartTimeUs{ simStartTimeUs_ }
      , simEndTimeUs{ simEndTimeUs_ }
      , renderSubmitStartTimeUs{ renderSubmitStartTimeUs_ }
      , renderSubmitEndTimeUs{ renderSubmitEndTimeUs_ }
      , presentStartTimeUs{ presentStartTimeUs_ }
      , presentEndTimeUs{ presentEndTimeUs_ }
      , driverStartTimeUs{ driverStartTimeUs_ }
      , driverEndTimeUs{ driverEndTimeUs_ }
      , osRenderQueueStartTimeUs{ osRenderQueueStartTimeUs_ }
      , osRenderQueueEndTimeUs{ osRenderQueueEndTimeUs_ }
      , gpuRenderStartTimeUs{ gpuRenderStartTimeUs_ }
      , gpuRenderEndTimeUs{ gpuRenderEndTimeUs_ }
    {
    }

    VULKAN_HPP_CONSTEXPR LatencyTimingsFrameReportNV( LatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    LatencyTimingsFrameReportNV( VkLatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : LatencyTimingsFrameReportNV( *reinterpret_cast<LatencyTimingsFrameReportNV const *>( &rhs ) )
    {
    }

    LatencyTimingsFrameReportNV & operator=( LatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, via the same reinterpret_cast used by the converting constructor.
    LatencyTimingsFrameReportNV & operator=( VkLatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<LatencyTimingsFrameReportNV const *>( &rhs );
      return *this;
    }

    // Implicit conversion to a const reference of the native type, for passing to the C API.
    operator VkLatencyTimingsFrameReportNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkLatencyTimingsFrameReportNV *>( this );
    }

    // Implicit conversion to a mutable reference of the native type.
    operator VkLatencyTimingsFrameReportNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkLatencyTimingsFrameReportNV *>( this );
    }

    // Implicit conversion to a const pointer of the native type.
    operator VkLatencyTimingsFrameReportNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkLatencyTimingsFrameReportNV *>( this );
    }

    // Implicit conversion to a mutable pointer of the native type.
    operator VkLatencyTimingsFrameReportNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkLatencyTimingsFrameReportNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members (including sType and pNext) as a tuple of const references,
    // in declaration order; used by the reflection-based operator== below.
    std::tuple<StructureType const &,
               void * const &,
               uint64_t const &,
               uint64_t const &,
               uint64_t const &,
               uint64_t const &,
               uint64_t const &,
               uint64_t const &,
               uint64_t const &,
               uint64_t const &,
               uint64_t const &,
               uint64_t const &,
               uint64_t const &,
               uint64_t const &,
               uint64_t const &,
               uint64_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       presentID,
                       inputSampleTimeUs,
                       simStartTimeUs,
                       simEndTimeUs,
                       renderSubmitStartTimeUs,
                       renderSubmitEndTimeUs,
                       presentStartTimeUs,
                       presentEndTimeUs,
                       driverStartTimeUs,
                       driverEndTimeUs,
                       osRenderQueueStartTimeUs,
                       osRenderQueueEndTimeUs,
                       gpuRenderStartTimeUs,
                       gpuRenderEndTimeUs );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20 path: a defaulted three-way comparison provides ==, !=, <, <=, >, >=.
    auto operator<=>( LatencyTimingsFrameReportNV const & ) const = default;
#else
    // Pre-C++20 fallback: hand-written equality over every member, including sType and pNext.
    bool operator==( LatencyTimingsFrameReportNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentID == rhs.presentID ) && ( inputSampleTimeUs == rhs.inputSampleTimeUs ) &&
             ( simStartTimeUs == rhs.simStartTimeUs ) && ( simEndTimeUs == rhs.simEndTimeUs ) && ( renderSubmitStartTimeUs == rhs.renderSubmitStartTimeUs ) &&
             ( renderSubmitEndTimeUs == rhs.renderSubmitEndTimeUs ) && ( presentStartTimeUs == rhs.presentStartTimeUs ) &&
             ( presentEndTimeUs == rhs.presentEndTimeUs ) && ( driverStartTimeUs == rhs.driverStartTimeUs ) && ( driverEndTimeUs == rhs.driverEndTimeUs ) &&
             ( osRenderQueueStartTimeUs == rhs.osRenderQueueStartTimeUs ) && ( osRenderQueueEndTimeUs == rhs.osRenderQueueEndTimeUs ) &&
             ( gpuRenderStartTimeUs == rhs.gpuRenderStartTimeUs ) && ( gpuRenderEndTimeUs == rhs.gpuRenderEndTimeUs );
#  endif
    }

    bool operator!=( LatencyTimingsFrameReportNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkLatencyTimingsFrameReportNV field-for-field; the reinterpret_cast-based
    // conversion operators above rely on this declaration order. All timestamps are in microseconds
    // (per the Us suffix).
    StructureType sType                    = StructureType::eLatencyTimingsFrameReportNV;
    void *        pNext                    = {};
    uint64_t      presentID                = {};
    uint64_t      inputSampleTimeUs        = {};
    uint64_t      simStartTimeUs           = {};
    uint64_t      simEndTimeUs             = {};
    uint64_t      renderSubmitStartTimeUs  = {};
    uint64_t      renderSubmitEndTimeUs    = {};
    uint64_t      presentStartTimeUs       = {};
    uint64_t      presentEndTimeUs         = {};
    uint64_t      driverStartTimeUs        = {};
    uint64_t      driverEndTimeUs          = {};
    uint64_t      osRenderQueueStartTimeUs = {};
    uint64_t      osRenderQueueEndTimeUs   = {};
    uint64_t      gpuRenderStartTimeUs     = {};
    uint64_t      gpuRenderEndTimeUs       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper at compile time (C++20 and up only).
  template <>
  struct CppType<VkLatencyTimingsFrameReportNV>
  {
    using Type = LatencyTimingsFrameReportNV;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type (used e.g. by structure chains).
  template <>
  struct CppType<StructureType, StructureType::eLatencyTimingsFrameReportNV>
  {
    using Type = LatencyTimingsFrameReportNV;
  };

  // wrapper struct for struct VkGetLatencyMarkerInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGetLatencyMarkerInfoNV.html
  struct GetLatencyMarkerInfoNV
  {
    using NativeType = VkGetLatencyMarkerInfoNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eGetLatencyMarkerInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Default/member-wise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR
      GetLatencyMarkerInfoNV( uint32_t timingCount_ = {}, LatencyTimingsFrameReportNV * pTimings_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , timingCount{ timingCount_ }
      , pTimings{ pTimings_ }
    {
    }

    VULKAN_HPP_CONSTEXPR GetLatencyMarkerInfoNV( GetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Converting constructor from the C struct (the wrapper mirrors the C layout).
    GetLatencyMarkerInfoNV( VkGetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : GetLatencyMarkerInfoNV( *reinterpret_cast<GetLatencyMarkerInfoNV const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives timingCount and pTimings from the array proxy.
    GetLatencyMarkerInfoNV( ArrayProxyNoTemporaries<LatencyTimingsFrameReportNV> const & timings_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), timingCount( static_cast<uint32_t>( timings_.size() ) ), pTimings( timings_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    GetLatencyMarkerInfoNV & operator=( GetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, via reinterpret_cast of the layout-mirroring wrapper.
    GetLatencyMarkerInfoNV & operator=( VkGetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<GetLatencyMarkerInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 GetLatencyMarkerInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GetLatencyMarkerInfoNV & setTimingCount( uint32_t timingCount_ ) VULKAN_HPP_NOEXCEPT
    {
      timingCount = timingCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GetLatencyMarkerInfoNV & setPTimings( LatencyTimingsFrameReportNV * pTimings_ ) VULKAN_HPP_NOEXCEPT
    {
      pTimings = pTimings_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both timingCount and pTimings from a single array proxy.
    GetLatencyMarkerInfoNV & setTimings( ArrayProxyNoTemporaries<LatencyTimingsFrameReportNV> const & timings_ ) VULKAN_HPP_NOEXCEPT
    {
      timingCount = static_cast<uint32_t>( timings_.size() );
      pTimings    = timings_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions so the wrapper can be passed directly to the C API.
    operator VkGetLatencyMarkerInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGetLatencyMarkerInfoNV *>( this );
    }

    operator VkGetLatencyMarkerInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGetLatencyMarkerInfoNV *>( this );
    }

    operator VkGetLatencyMarkerInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkGetLatencyMarkerInfoNV *>( this );
    }

    operator VkGetLatencyMarkerInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkGetLatencyMarkerInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of const references, in declaration order.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, LatencyTimingsFrameReportNV * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, timingCount, pTimings );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( GetLatencyMarkerInfoNV const & ) const = default;
#else
    // Pre-C++20 fallback: member-wise equality (pTimings is compared as a pointer, not deeply).
    bool operator==( GetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timingCount == rhs.timingCount ) && ( pTimings == rhs.pTimings );
#  endif
    }

    bool operator!=( GetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Layout mirrors VkGetLatencyMarkerInfoNV field-for-field.
    StructureType                 sType       = StructureType::eGetLatencyMarkerInfoNV;
    const void *                  pNext       = {};
    uint32_t                      timingCount = {};
    LatencyTimingsFrameReportNV * pTimings    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper at compile time (C++20 and up only).
  template <>
  struct CppType<VkGetLatencyMarkerInfoNV>
  {
    using Type = GetLatencyMarkerInfoNV;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type (used e.g. by structure chains).
  template <>
  struct CppType<StructureType, StructureType::eGetLatencyMarkerInfoNV>
  {
    using Type = GetLatencyMarkerInfoNV;
  };

  // wrapper struct for struct VkVertexInputBindingDescription, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVertexInputBindingDescription.html
  struct VertexInputBindingDescription
  {
    using NativeType = VkVertexInputBindingDescription;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; inputRate defaults to per-vertex.
    VULKAN_HPP_CONSTEXPR
      VertexInputBindingDescription( uint32_t binding_ = {}, uint32_t stride_ = {}, VertexInputRate inputRate_ = VertexInputRate::eVertex ) VULKAN_HPP_NOEXCEPT
      : binding{ binding_ }
      , stride{ stride_ }
      , inputRate{ inputRate_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Converting constructor from the C struct (the wrapper mirrors the C layout).
    VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
      : VertexInputBindingDescription( *reinterpret_cast<VertexInputBindingDescription const *>( &rhs ) )
    {
    }

    VertexInputBindingDescription & operator=( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, via reinterpret_cast of the layout-mirroring wrapper.
    VertexInputBindingDescription & operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VertexInputBindingDescription const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
    {
      binding = binding_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
    {
      stride = stride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setInputRate( VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT
    {
      inputRate = inputRate_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions so the wrapper can be passed directly to the C API.
    operator VkVertexInputBindingDescription const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVertexInputBindingDescription *>( this );
    }

    operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVertexInputBindingDescription *>( this );
    }

    operator VkVertexInputBindingDescription const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVertexInputBindingDescription *>( this );
    }

    operator VkVertexInputBindingDescription *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVertexInputBindingDescription *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of const references, in declaration order.
    std::tuple<uint32_t const &, uint32_t const &, VertexInputRate const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( binding, stride, inputRate );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VertexInputBindingDescription const & ) const = default;
#else
    // Pre-C++20 fallback: member-wise equality.
    bool operator==( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( binding == rhs.binding ) && ( stride == rhs.stride ) && ( inputRate == rhs.inputRate );
#  endif
    }

    bool operator!=( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Layout mirrors VkVertexInputBindingDescription field-for-field (no sType/pNext).
    uint32_t        binding   = {};
    uint32_t        stride    = {};
    VertexInputRate inputRate = VertexInputRate::eVertex;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper at compile time (C++20 and up only).
  template <>
  struct CppType<VkVertexInputBindingDescription>
  {
    using Type = VertexInputBindingDescription;
  };
#endif

  // wrapper struct for struct VkVertexInputAttributeDescription, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVertexInputAttributeDescription.html
  struct VertexInputAttributeDescription
  {
    using NativeType = VkVertexInputAttributeDescription;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; format defaults to Format::eUndefined.
    VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( uint32_t location_ = {},
                                                          uint32_t binding_  = {},
                                                          Format   format_   = Format::eUndefined,
                                                          uint32_t offset_   = {} ) VULKAN_HPP_NOEXCEPT
      : location{ location_ }
      , binding{ binding_ }
      , format{ format_ }
      , offset{ offset_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Converting constructor from the C struct (the wrapper mirrors the C layout).
    VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
      : VertexInputAttributeDescription( *reinterpret_cast<VertexInputAttributeDescription const *>( &rhs ) )
    {
    }

    VertexInputAttributeDescription & operator=( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, via reinterpret_cast of the layout-mirroring wrapper.
    VertexInputAttributeDescription & operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VertexInputAttributeDescription const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT
    {
      location = location_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
    {
      binding = binding_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setFormat( Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions so the wrapper can be passed directly to the C API.
    operator VkVertexInputAttributeDescription const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVertexInputAttributeDescription *>( this );
    }

    operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVertexInputAttributeDescription *>( this );
    }

    operator VkVertexInputAttributeDescription const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVertexInputAttributeDescription *>( this );
    }

    operator VkVertexInputAttributeDescription *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVertexInputAttributeDescription *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of const references, in declaration order.
    std::tuple<uint32_t const &, uint32_t const &, Format const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( location, binding, format, offset );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VertexInputAttributeDescription const & ) const = default;
#else
    // Pre-C++20 fallback: member-wise equality.
    bool operator==( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( location == rhs.location ) && ( binding == rhs.binding ) && ( format == rhs.format ) && ( offset == rhs.offset );
#  endif
    }

    bool operator!=( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Layout mirrors VkVertexInputAttributeDescription field-for-field (no sType/pNext).
    uint32_t location = {};
    uint32_t binding  = {};
    Format   format   = Format::eUndefined;
    uint32_t offset   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper at compile time (C++20 and up only).
  template <>
  struct CppType<VkVertexInputAttributeDescription>
  {
    using Type = VertexInputAttributeDescription;
  };
#endif

  // wrapper struct for struct VkPipelineVertexInputStateCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineVertexInputStateCreateInfo.html
  struct PipelineVertexInputStateCreateInfo
  {
    using NativeType = VkPipelineVertexInputStateCreateInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineVertexInputStateCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateFlags     flags_                           = {},
                                                             uint32_t                                vertexBindingDescriptionCount_   = {},
                                                             const VertexInputBindingDescription *   pVertexBindingDescriptions_      = {},
                                                             uint32_t                                vertexAttributeDescriptionCount_ = {},
                                                             const VertexInputAttributeDescription * pVertexAttributeDescriptions_    = {},
                                                             const void *                            pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , vertexBindingDescriptionCount{ vertexBindingDescriptionCount_ }
      , pVertexBindingDescriptions{ pVertexBindingDescriptions_ }
      , vertexAttributeDescriptionCount{ vertexAttributeDescriptionCount_ }
      , pVertexAttributeDescriptions{ pVertexAttributeDescriptions_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Converting constructor from the C struct (the wrapper mirrors the C layout).
    PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineVertexInputStateCreateInfo( *reinterpret_cast<PipelineVertexInputStateCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives both count/pointer pairs from array proxies.
    PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateFlags                                    flags_,
                                        ArrayProxyNoTemporaries<const VertexInputBindingDescription> const &   vertexBindingDescriptions_,
                                        ArrayProxyNoTemporaries<const VertexInputAttributeDescription> const & vertexAttributeDescriptions_ = {},
                                        const void *                                                           pNext_                       = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , vertexBindingDescriptionCount( static_cast<uint32_t>( vertexBindingDescriptions_.size() ) )
      , pVertexBindingDescriptions( vertexBindingDescriptions_.data() )
      , vertexAttributeDescriptionCount( static_cast<uint32_t>( vertexAttributeDescriptions_.size() ) )
      , pVertexAttributeDescriptions( vertexAttributeDescriptions_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineVertexInputStateCreateInfo & operator=( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, via reinterpret_cast of the layout-mirroring wrapper.
    PipelineVertexInputStateCreateInfo & operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineVertexInputStateCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setFlags( PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexBindingDescriptionCount = vertexBindingDescriptionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo &
      setPVertexBindingDescriptions( const VertexInputBindingDescription * pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT
    {
      pVertexBindingDescriptions = pVertexBindingDescriptions_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both vertexBindingDescriptionCount and pVertexBindingDescriptions from one proxy.
    PipelineVertexInputStateCreateInfo &
      setVertexBindingDescriptions( ArrayProxyNoTemporaries<const VertexInputBindingDescription> const & vertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexBindingDescriptionCount = static_cast<uint32_t>( vertexBindingDescriptions_.size() );
      pVertexBindingDescriptions    = vertexBindingDescriptions_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo &
      setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo &
      setPVertexAttributeDescriptions( const VertexInputAttributeDescription * pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT
    {
      pVertexAttributeDescriptions = pVertexAttributeDescriptions_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both vertexAttributeDescriptionCount and pVertexAttributeDescriptions from one proxy.
    PipelineVertexInputStateCreateInfo &
      setVertexAttributeDescriptions( ArrayProxyNoTemporaries<const VertexInputAttributeDescription> const & vertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexAttributeDescriptionCount = static_cast<uint32_t>( vertexAttributeDescriptions_.size() );
      pVertexAttributeDescriptions    = vertexAttributeDescriptions_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions so the wrapper can be passed directly to the C API.
    operator VkPipelineVertexInputStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineVertexInputStateCreateInfo *>( this );
    }

    operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineVertexInputStateCreateInfo *>( this );
    }

    operator VkPipelineVertexInputStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineVertexInputStateCreateInfo *>( this );
    }

    operator VkPipelineVertexInputStateCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineVertexInputStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of const references, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               PipelineVertexInputStateCreateFlags const &,
               uint32_t const &,
               const VertexInputBindingDescription * const &,
               uint32_t const &,
               const VertexInputAttributeDescription * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(
        sType, pNext, flags, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineVertexInputStateCreateInfo const & ) const = default;
#else
    // Pre-C++20 fallback: member-wise equality (description pointers compared shallowly).
    bool operator==( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount ) && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions ) &&
             ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount ) && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions );
#  endif
    }

    bool operator!=( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Layout mirrors VkPipelineVertexInputStateCreateInfo field-for-field.
    StructureType                           sType                           = StructureType::ePipelineVertexInputStateCreateInfo;
    const void *                            pNext                           = {};
    PipelineVertexInputStateCreateFlags     flags                           = {};
    uint32_t                                vertexBindingDescriptionCount   = {};
    const VertexInputBindingDescription *   pVertexBindingDescriptions      = {};
    uint32_t                                vertexAttributeDescriptionCount = {};
    const VertexInputAttributeDescription * pVertexAttributeDescriptions    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper at compile time (C++20 and up only).
  template <>
  struct CppType<VkPipelineVertexInputStateCreateInfo>
  {
    using Type = PipelineVertexInputStateCreateInfo;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type (used e.g. by structure chains).
  template <>
  struct CppType<StructureType, StructureType::ePipelineVertexInputStateCreateInfo>
  {
    using Type = PipelineVertexInputStateCreateInfo;
  };

  // wrapper struct for struct VkPipelineInputAssemblyStateCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineInputAssemblyStateCreateInfo.html
  struct PipelineInputAssemblyStateCreateInfo
  {
    using NativeType = VkPipelineInputAssemblyStateCreateInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineInputAssemblyStateCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateFlags flags_                  = {},
                                                               PrimitiveTopology                     topology_               = PrimitiveTopology::ePointList,
                                                               Bool32                                primitiveRestartEnable_ = {},
                                                               const void *                          pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , topology{ topology_ }
      , primitiveRestartEnable{ primitiveRestartEnable_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineInputAssemblyStateCreateInfo( *reinterpret_cast<PipelineInputAssemblyStateCreateInfo const *>( &rhs ) )
    {
    }

    PipelineInputAssemblyStateCreateInfo & operator=( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PipelineInputAssemblyStateCreateInfo & operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineInputAssemblyStateCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setFlags( PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setTopology( PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT
    {
      topology = topology_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setPrimitiveRestartEnable( Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      primitiveRestartEnable = primitiveRestartEnable_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPipelineInputAssemblyStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineInputAssemblyStateCreateInfo *>( this );
    }

    operator VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineInputAssemblyStateCreateInfo *>( this );
    }

    operator VkPipelineInputAssemblyStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineInputAssemblyStateCreateInfo *>( this );
    }

    operator VkPipelineInputAssemblyStateCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineInputAssemblyStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, PipelineInputAssemblyStateCreateFlags const &, PrimitiveTopology const &, Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, topology, primitiveRestartEnable );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20 path: defaulted three-way comparison generates ==, !=, <, etc. memberwise.
    auto operator<=>( PipelineInputAssemblyStateCreateInfo const & ) const = default;
#else
    // Pre-C++20 path: memberwise equality (pNext is compared as a raw pointer, not deeply).
    bool operator==( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( topology == rhs.topology ) &&
             ( primitiveRestartEnable == rhs.primitiveRestartEnable );
#  endif
    }

    bool operator!=( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPipelineInputAssemblyStateCreateInfo in declaration order;
    // sType defaults to this struct's StructureType tag and should not be changed.
    StructureType                         sType                  = StructureType::ePipelineInputAssemblyStateCreateInfo;
    const void *                          pNext                  = {};
    PipelineInputAssemblyStateCreateFlags flags                  = {};
    PrimitiveTopology                     topology               = PrimitiveTopology::ePointList;
    Bool32                                primitiveRestartEnable = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper (C++20 only).
  template <>
  struct CppType<VkPipelineInputAssemblyStateCreateInfo>
  {
    using Type = PipelineInputAssemblyStateCreateInfo;
  };
#endif

  // Maps the sType enumerant back to this C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePipelineInputAssemblyStateCreateInfo>
  {
    using Type = PipelineInputAssemblyStateCreateInfo;
  };

  // wrapper struct for struct VkPipelineTessellationStateCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineTessellationStateCreateInfo.html
  struct PipelineTessellationStateCreateInfo
  {
    using NativeType = VkPipelineTessellationStateCreateInfo;

    // Structure-chain metadata: this struct may not appear twice in a pNext chain,
    // and structureType is its compile-time sType tag.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineTessellationStateCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // All-member constructor; pNext comes last so the common fields can be given positionally.
    VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateFlags flags_              = {},
                                                              uint32_t                             patchControlPoints_ = {},
                                                              const void *                         pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , patchControlPoints{ patchControlPoints_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; reinterpret_cast is valid because the
    // generator keeps this wrapper layout-identical to the C struct.
    PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineTessellationStateCreateInfo( *reinterpret_cast<PipelineTessellationStateCreateInfo const *>( &rhs ) )
    {
    }

    PipelineTessellationStateCreateInfo & operator=( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-equivalence as the converting constructor).
    PipelineTessellationStateCreateInfo & operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineTessellationStateCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setFlags( PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setPatchControlPoints( uint32_t patchControlPoints_ ) VULKAN_HPP_NOEXCEPT
    {
      patchControlPoints = patchControlPoints_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const).
    operator VkPipelineTessellationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineTessellationStateCreateInfo *>( this );
    }

    operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineTessellationStateCreateInfo *>( this );
    }

    operator VkPipelineTessellationStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineTessellationStateCreateInfo *>( this );
    }

    operator VkPipelineTessellationStateCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineTessellationStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, PipelineTessellationStateCreateFlags const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, patchControlPoints );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison generates all comparison operators memberwise.
    auto operator<=>( PipelineTessellationStateCreateInfo const & ) const = default;
#else
    // Pre-C++20: memberwise equality (pNext compared as a raw pointer, not deeply).
    bool operator==( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( patchControlPoints == rhs.patchControlPoints );
#  endif
    }

    bool operator!=( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPipelineTessellationStateCreateInfo in declaration order.
    StructureType                        sType              = StructureType::ePipelineTessellationStateCreateInfo;
    const void *                         pNext              = {};
    PipelineTessellationStateCreateFlags flags              = {};
    uint32_t                             patchControlPoints = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper (C++20 only).
  template <>
  struct CppType<VkPipelineTessellationStateCreateInfo>
  {
    using Type = PipelineTessellationStateCreateInfo;
  };
#endif

  // Maps the sType enumerant back to this C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePipelineTessellationStateCreateInfo>
  {
    using Type = PipelineTessellationStateCreateInfo;
  };

  // wrapper struct for struct VkPipelineViewportStateCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportStateCreateInfo.html
  struct PipelineViewportStateCreateInfo
  {
    using NativeType = VkPipelineViewportStateCreateInfo;

    // Structure-chain metadata: no duplicates allowed in a pNext chain; structureType
    // is the compile-time sType tag.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineViewportStateCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // All-member constructor; pNext comes last. Note: the count/pointer pairs are
    // taken verbatim — no consistency check between viewportCount and pViewports.
    VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( PipelineViewportStateCreateFlags flags_         = {},
                                                          uint32_t                         viewportCount_ = {},
                                                          const Viewport *                 pViewports_    = {},
                                                          uint32_t                         scissorCount_  = {},
                                                          const Rect2D *                   pScissors_     = {},
                                                          const void *                     pNext_         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , viewportCount{ viewportCount_ }
      , pViewports{ pViewports_ }
      , scissorCount{ scissorCount_ }
      , pScissors{ pScissors_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on layout-identity with VkPipelineViewportStateCreateInfo.
    PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineViewportStateCreateInfo( *reinterpret_cast<PipelineViewportStateCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: count/pointer pairs are derived from the array proxies.
    // The proxies are non-owning views; the caller must keep the viewport/scissor
    // arrays alive for the lifetime of this struct.
    PipelineViewportStateCreateInfo( PipelineViewportStateCreateFlags                flags_,
                                     ArrayProxyNoTemporaries<const Viewport> const & viewports_,
                                     ArrayProxyNoTemporaries<const Rect2D> const &   scissors_ = {},
                                     const void *                                    pNext_    = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , viewportCount( static_cast<uint32_t>( viewports_.size() ) )
      , pViewports( viewports_.data() )
      , scissorCount( static_cast<uint32_t>( scissors_.size() ) )
      , pScissors( scissors_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineViewportStateCreateInfo & operator=( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-equivalence as the converting constructor).
    PipelineViewportStateCreateInfo & operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineViewportStateCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setFlags( PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
    {
      viewportCount = viewportCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPViewports( const Viewport * pViewports_ ) VULKAN_HPP_NOEXCEPT
    {
      pViewports = pViewports_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets viewportCount and pViewports together from one proxy.
    PipelineViewportStateCreateInfo & setViewports( ArrayProxyNoTemporaries<const Viewport> const & viewports_ ) VULKAN_HPP_NOEXCEPT
    {
      viewportCount = static_cast<uint32_t>( viewports_.size() );
      pViewports    = viewports_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setScissorCount( uint32_t scissorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      scissorCount = scissorCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPScissors( const Rect2D * pScissors_ ) VULKAN_HPP_NOEXCEPT
    {
      pScissors = pScissors_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets scissorCount and pScissors together from one proxy.
    PipelineViewportStateCreateInfo & setScissors( ArrayProxyNoTemporaries<const Rect2D> const & scissors_ ) VULKAN_HPP_NOEXCEPT
    {
      scissorCount = static_cast<uint32_t>( scissors_.size() );
      pScissors    = scissors_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const).
    operator VkPipelineViewportStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineViewportStateCreateInfo *>( this );
    }

    operator VkPipelineViewportStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineViewportStateCreateInfo *>( this );
    }

    operator VkPipelineViewportStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineViewportStateCreateInfo *>( this );
    }

    operator VkPipelineViewportStateCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineViewportStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               PipelineViewportStateCreateFlags const &,
               uint32_t const &,
               const Viewport * const &,
               uint32_t const &,
               const Rect2D * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, viewportCount, pViewports, scissorCount, pScissors );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison generates all comparison operators memberwise.
    auto operator<=>( PipelineViewportStateCreateInfo const & ) const = default;
#else
    // Pre-C++20: memberwise equality (pointer members compared by address, not contents).
    bool operator==( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( viewportCount == rhs.viewportCount ) &&
             ( pViewports == rhs.pViewports ) && ( scissorCount == rhs.scissorCount ) && ( pScissors == rhs.pScissors );
#  endif
    }

    bool operator!=( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPipelineViewportStateCreateInfo in declaration order.
    StructureType                    sType         = StructureType::ePipelineViewportStateCreateInfo;
    const void *                     pNext         = {};
    PipelineViewportStateCreateFlags flags         = {};
    uint32_t                         viewportCount = {};
    const Viewport *                 pViewports    = {};
    uint32_t                         scissorCount  = {};
    const Rect2D *                   pScissors     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper (C++20 only).
  template <>
  struct CppType<VkPipelineViewportStateCreateInfo>
  {
    using Type = PipelineViewportStateCreateInfo;
  };
#endif

  // Maps the sType enumerant back to this C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePipelineViewportStateCreateInfo>
  {
    using Type = PipelineViewportStateCreateInfo;
  };

  // wrapper struct for struct VkPipelineRasterizationStateCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationStateCreateInfo.html
  struct PipelineRasterizationStateCreateInfo
  {
    using NativeType = VkPipelineRasterizationStateCreateInfo;

    // Structure-chain metadata: no duplicates allowed in a pNext chain; structureType
    // is the compile-time sType tag.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineRasterizationStateCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // All-member constructor; pNext comes last. Note lineWidth defaults to 0.0f
    // (value-initialized), not 1.0f — callers must set it explicitly for line drawing.
    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateFlags flags_                   = {},
                                                               Bool32                                depthClampEnable_        = {},
                                                               Bool32                                rasterizerDiscardEnable_ = {},
                                                               PolygonMode                           polygonMode_             = PolygonMode::eFill,
                                                               CullModeFlags                         cullMode_                = {},
                                                               FrontFace                             frontFace_               = FrontFace::eCounterClockwise,
                                                               Bool32                                depthBiasEnable_         = {},
                                                               float                                 depthBiasConstantFactor_ = {},
                                                               float                                 depthBiasClamp_          = {},
                                                               float                                 depthBiasSlopeFactor_    = {},
                                                               float                                 lineWidth_               = {},
                                                               const void *                          pNext_                   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , depthClampEnable{ depthClampEnable_ }
      , rasterizerDiscardEnable{ rasterizerDiscardEnable_ }
      , polygonMode{ polygonMode_ }
      , cullMode{ cullMode_ }
      , frontFace{ frontFace_ }
      , depthBiasEnable{ depthBiasEnable_ }
      , depthBiasConstantFactor{ depthBiasConstantFactor_ }
      , depthBiasClamp{ depthBiasClamp_ }
      , depthBiasSlopeFactor{ depthBiasSlopeFactor_ }
      , lineWidth{ lineWidth_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on layout-identity with VkPipelineRasterizationStateCreateInfo.
    PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineRasterizationStateCreateInfo( *reinterpret_cast<PipelineRasterizationStateCreateInfo const *>( &rhs ) )
    {
    }

    PipelineRasterizationStateCreateInfo & operator=( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-equivalence as the converting constructor).
    PipelineRasterizationStateCreateInfo & operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineRasterizationStateCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setFlags( PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthClampEnable( Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthClampEnable = depthClampEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setRasterizerDiscardEnable( Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      rasterizerDiscardEnable = rasterizerDiscardEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setPolygonMode( PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT
    {
      polygonMode = polygonMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setCullMode( CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT
    {
      cullMode = cullMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setFrontFace( FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT
    {
      frontFace = frontFace_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasEnable( Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBiasEnable = depthBiasEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBiasConstantFactor = depthBiasConstantFactor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasClamp( float depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBiasClamp = depthBiasClamp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBiasSlopeFactor = depthBiasSlopeFactor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setLineWidth( float lineWidth_ ) VULKAN_HPP_NOEXCEPT
    {
      lineWidth = lineWidth_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const).
    operator VkPipelineRasterizationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRasterizationStateCreateInfo *>( this );
    }

    operator VkPipelineRasterizationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRasterizationStateCreateInfo *>( this );
    }

    operator VkPipelineRasterizationStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineRasterizationStateCreateInfo *>( this );
    }

    operator VkPipelineRasterizationStateCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineRasterizationStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               PipelineRasterizationStateCreateFlags const &,
               Bool32 const &,
               Bool32 const &,
               PolygonMode const &,
               CullModeFlags const &,
               FrontFace const &,
               Bool32 const &,
               float const &,
               float const &,
               float const &,
               float const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       flags,
                       depthClampEnable,
                       rasterizerDiscardEnable,
                       polygonMode,
                       cullMode,
                       frontFace,
                       depthBiasEnable,
                       depthBiasConstantFactor,
                       depthBiasClamp,
                       depthBiasSlopeFactor,
                       lineWidth );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison generates all comparison operators memberwise.
    auto operator<=>( PipelineRasterizationStateCreateInfo const & ) const = default;
#else
    // Pre-C++20: memberwise equality; float members compared with exact ==.
    bool operator==( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( depthClampEnable == rhs.depthClampEnable ) &&
             ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable ) && ( polygonMode == rhs.polygonMode ) && ( cullMode == rhs.cullMode ) &&
             ( frontFace == rhs.frontFace ) && ( depthBiasEnable == rhs.depthBiasEnable ) && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor ) &&
             ( depthBiasClamp == rhs.depthBiasClamp ) && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor ) && ( lineWidth == rhs.lineWidth );
#  endif
    }

    bool operator!=( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPipelineRasterizationStateCreateInfo in declaration order.
    StructureType                         sType                   = StructureType::ePipelineRasterizationStateCreateInfo;
    const void *                          pNext                   = {};
    PipelineRasterizationStateCreateFlags flags                   = {};
    Bool32                                depthClampEnable        = {};
    Bool32                                rasterizerDiscardEnable = {};
    PolygonMode                           polygonMode             = PolygonMode::eFill;
    CullModeFlags                         cullMode                = {};
    FrontFace                             frontFace               = FrontFace::eCounterClockwise;
    Bool32                                depthBiasEnable         = {};
    float                                 depthBiasConstantFactor = {};
    float                                 depthBiasClamp          = {};
    float                                 depthBiasSlopeFactor    = {};
    float                                 lineWidth               = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper (C++20 only).
  template <>
  struct CppType<VkPipelineRasterizationStateCreateInfo>
  {
    using Type = PipelineRasterizationStateCreateInfo;
  };
#endif

  // Maps the sType enumerant back to this C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePipelineRasterizationStateCreateInfo>
  {
    using Type = PipelineRasterizationStateCreateInfo;
  };

  // wrapper struct for struct VkPipelineMultisampleStateCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineMultisampleStateCreateInfo.html
  struct PipelineMultisampleStateCreateInfo
  {
    using NativeType = VkPipelineMultisampleStateCreateInfo;

    // Structure-chain metadata: no duplicates allowed in a pNext chain; structureType
    // is the compile-time sType tag.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineMultisampleStateCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // All-member constructor; pNext comes last. pSampleMask may stay null
    // (value-initialized), in which case no mask pointer is passed through.
    VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateFlags flags_                 = {},
                                                             SampleCountFlagBits                 rasterizationSamples_  = SampleCountFlagBits::e1,
                                                             Bool32                              sampleShadingEnable_   = {},
                                                             float                               minSampleShading_      = {},
                                                             const SampleMask *                  pSampleMask_           = {},
                                                             Bool32                              alphaToCoverageEnable_ = {},
                                                             Bool32                              alphaToOneEnable_      = {},
                                                             const void *                        pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , rasterizationSamples{ rasterizationSamples_ }
      , sampleShadingEnable{ sampleShadingEnable_ }
      , minSampleShading{ minSampleShading_ }
      , pSampleMask{ pSampleMask_ }
      , alphaToCoverageEnable{ alphaToCoverageEnable_ }
      , alphaToOneEnable{ alphaToOneEnable_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on layout-identity with VkPipelineMultisampleStateCreateInfo.
    PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineMultisampleStateCreateInfo( *reinterpret_cast<PipelineMultisampleStateCreateInfo const *>( &rhs ) )
    {
    }

    PipelineMultisampleStateCreateInfo & operator=( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-equivalence as the converting constructor).
    PipelineMultisampleStateCreateInfo & operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineMultisampleStateCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setFlags( PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setRasterizationSamples( SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
    {
      rasterizationSamples = rasterizationSamples_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setSampleShadingEnable( Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleShadingEnable = sampleShadingEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setMinSampleShading( float minSampleShading_ ) VULKAN_HPP_NOEXCEPT
    {
      minSampleShading = minSampleShading_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setPSampleMask( const SampleMask * pSampleMask_ ) VULKAN_HPP_NOEXCEPT
    {
      pSampleMask = pSampleMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setAlphaToCoverageEnable( Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      alphaToCoverageEnable = alphaToCoverageEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setAlphaToOneEnable( Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      alphaToOneEnable = alphaToOneEnable_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const).
    operator VkPipelineMultisampleStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineMultisampleStateCreateInfo *>( this );
    }

    operator VkPipelineMultisampleStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineMultisampleStateCreateInfo *>( this );
    }

    operator VkPipelineMultisampleStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineMultisampleStateCreateInfo *>( this );
    }

    operator VkPipelineMultisampleStateCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineMultisampleStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               PipelineMultisampleStateCreateFlags const &,
               SampleCountFlagBits const &,
               Bool32 const &,
               float const &,
               const SampleMask * const &,
               Bool32 const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, rasterizationSamples, sampleShadingEnable, minSampleShading, pSampleMask, alphaToCoverageEnable, alphaToOneEnable );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison generates all comparison operators memberwise.
    auto operator<=>( PipelineMultisampleStateCreateInfo const & ) const = default;
#else
    // Pre-C++20: memberwise equality (pSampleMask compared by address, not by mask contents).
    bool operator==( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( rasterizationSamples == rhs.rasterizationSamples ) &&
             ( sampleShadingEnable == rhs.sampleShadingEnable ) && ( minSampleShading == rhs.minSampleShading ) && ( pSampleMask == rhs.pSampleMask ) &&
             ( alphaToCoverageEnable == rhs.alphaToCoverageEnable ) && ( alphaToOneEnable == rhs.alphaToOneEnable );
#  endif
    }

    bool operator!=( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPipelineMultisampleStateCreateInfo in declaration order.
    StructureType                       sType                 = StructureType::ePipelineMultisampleStateCreateInfo;
    const void *                        pNext                 = {};
    PipelineMultisampleStateCreateFlags flags                 = {};
    SampleCountFlagBits                 rasterizationSamples  = SampleCountFlagBits::e1;
    Bool32                              sampleShadingEnable   = {};
    float                               minSampleShading      = {};
    const SampleMask *                  pSampleMask           = {};
    Bool32                              alphaToCoverageEnable = {};
    Bool32                              alphaToOneEnable      = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper (C++20 only).
  template <>
  struct CppType<VkPipelineMultisampleStateCreateInfo>
  {
    using Type = PipelineMultisampleStateCreateInfo;
  };
#endif

  // Maps the sType enumerant back to this C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePipelineMultisampleStateCreateInfo>
  {
    using Type = PipelineMultisampleStateCreateInfo;
  };

  // wrapper struct for struct VkStencilOpState, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkStencilOpState.html
  // Layout-compatible with the native VkStencilOpState, which is what allows the reinterpret_cast
  // based constructors, assignment and conversion operators below.
  struct StencilOpState
  {
    using NativeType = VkStencilOpState;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; defaults are "keep" for all stencil ops and "never" for the compare op.
    VULKAN_HPP_CONSTEXPR StencilOpState( StencilOp failOp_      = StencilOp::eKeep,
                                         StencilOp passOp_      = StencilOp::eKeep,
                                         StencilOp depthFailOp_ = StencilOp::eKeep,
                                         CompareOp compareOp_   = CompareOp::eNever,
                                         uint32_t  compareMask_ = {},
                                         uint32_t  writeMask_   = {},
                                         uint32_t  reference_   = {} ) VULKAN_HPP_NOEXCEPT
      : failOp( failOp_ )
      , passOp( passOp_ )
      , depthFailOp( depthFailOp_ )
      , compareOp( compareOp_ )
      , compareMask( compareMask_ )
      , writeMask( writeMask_ )
      , reference( reference_ )
    {
    }

    VULKAN_HPP_CONSTEXPR StencilOpState( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpreting it as the layout-identical wrapper.
    StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT : StencilOpState( *reinterpret_cast<StencilOpState const *>( &rhs ) ) {}

    StencilOpState & operator=( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-identical, see above).
    StencilOpState & operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<StencilOpState const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable per-member setters.
    VULKAN_HPP_CONSTEXPR_14 StencilOpState & setFailOp( StencilOp failOp_ ) VULKAN_HPP_NOEXCEPT
    {
      this->failOp = failOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 StencilOpState & setPassOp( StencilOp passOp_ ) VULKAN_HPP_NOEXCEPT
    {
      this->passOp = passOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 StencilOpState & setDepthFailOp( StencilOp depthFailOp_ ) VULKAN_HPP_NOEXCEPT
    {
      this->depthFailOp = depthFailOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 StencilOpState & setCompareOp( CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
    {
      this->compareOp = compareOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 StencilOpState & setCompareMask( uint32_t compareMask_ ) VULKAN_HPP_NOEXCEPT
    {
      this->compareMask = compareMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 StencilOpState & setWriteMask( uint32_t writeMask_ ) VULKAN_HPP_NOEXCEPT
    {
      this->writeMask = writeMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 StencilOpState & setReference( uint32_t reference_ ) VULKAN_HPP_NOEXCEPT
    {
      this->reference = reference_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C struct (pointer and reference forms).
    operator VkStencilOpState const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkStencilOpState *>( this );
    }

    operator VkStencilOpState *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkStencilOpState *>( this );
    }

    operator VkStencilOpState const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkStencilOpState *>( this );
    }

    operator VkStencilOpState &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkStencilOpState *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references, used for comparisons and generic iteration.
    std::tuple<StencilOp const &, StencilOp const &, StencilOp const &, CompareOp const &, uint32_t const &, uint32_t const &, uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( failOp, passOp, depthFailOp, compareOp, compareMask, writeMask, reference );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( StencilOpState const & ) const = default;
#else
    // Member-wise equality; all operands are side-effect free, so ordering is irrelevant.
    bool operator==( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( reference == rhs.reference ) && ( writeMask == rhs.writeMask ) && ( compareMask == rhs.compareMask ) && ( compareOp == rhs.compareOp ) &&
             ( depthFailOp == rhs.depthFailOp ) && ( passOp == rhs.passOp ) && ( failOp == rhs.failOp );
#  endif
    }

    bool operator!=( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StencilOp failOp      = StencilOp::eKeep;
    StencilOp passOp      = StencilOp::eKeep;
    StencilOp depthFailOp = StencilOp::eKeep;
    CompareOp compareOp   = CompareOp::eNever;
    uint32_t  compareMask = {};
    uint32_t  writeMask   = {};
    uint32_t  reference   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkStencilOpState to its C++ wrapper (only provided for C++20 and newer).
  template <>
  struct CppType<VkStencilOpState>
  {
    using Type = StencilOpState;
  };
#endif

  // wrapper struct for struct VkPipelineDepthStencilStateCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineDepthStencilStateCreateInfo.html
  // Layout-compatible with the native C struct, which enables the reinterpret_cast based
  // constructor, assignment and conversion operators below.
  struct PipelineDepthStencilStateCreateInfo
  {
    using NativeType = VkPipelineDepthStencilStateCreateInfo;

    // This struct may appear at most once in a pNext chain; structureType identifies it for sType-based chaining.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineDepthStencilStateCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by the member initializer below, pNext_ comes last so
    // the common case (no extension chain) needs no argument.
    VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateFlags flags_                 = {},
                                                              Bool32                               depthTestEnable_       = {},
                                                              Bool32                               depthWriteEnable_      = {},
                                                              CompareOp                            depthCompareOp_        = CompareOp::eNever,
                                                              Bool32                               depthBoundsTestEnable_ = {},
                                                              Bool32                               stencilTestEnable_     = {},
                                                              StencilOpState                       front_                 = {},
                                                              StencilOpState                       back_                  = {},
                                                              float                                minDepthBounds_        = {},
                                                              float                                maxDepthBounds_        = {},
                                                              const void *                         pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , depthTestEnable{ depthTestEnable_ }
      , depthWriteEnable{ depthWriteEnable_ }
      , depthCompareOp{ depthCompareOp_ }
      , depthBoundsTestEnable{ depthBoundsTestEnable_ }
      , stencilTestEnable{ stencilTestEnable_ }
      , front{ front_ }
      , back{ back_ }
      , minDepthBounds{ minDepthBounds_ }
      , maxDepthBounds{ maxDepthBounds_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpreting it as the layout-identical wrapper.
    PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineDepthStencilStateCreateInfo( *reinterpret_cast<PipelineDepthStencilStateCreateInfo const *>( &rhs ) )
    {
    }

    PipelineDepthStencilStateCreateInfo & operator=( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-identical, see above).
    PipelineDepthStencilStateCreateInfo & operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineDepthStencilStateCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable per-member setters (sType is intentionally not settable).
    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setFlags( PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthTestEnable( Bool32 depthTestEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthTestEnable = depthTestEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthWriteEnable( Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthWriteEnable = depthWriteEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthCompareOp( CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT
    {
      depthCompareOp = depthCompareOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthBoundsTestEnable( Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBoundsTestEnable = depthBoundsTestEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setStencilTestEnable( Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilTestEnable = stencilTestEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setFront( StencilOpState const & front_ ) VULKAN_HPP_NOEXCEPT
    {
      front = front_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setBack( StencilOpState const & back_ ) VULKAN_HPP_NOEXCEPT
    {
      back = back_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setMinDepthBounds( float minDepthBounds_ ) VULKAN_HPP_NOEXCEPT
    {
      minDepthBounds = minDepthBounds_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setMaxDepthBounds( float maxDepthBounds_ ) VULKAN_HPP_NOEXCEPT
    {
      maxDepthBounds = maxDepthBounds_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C struct (reference and pointer forms).
    operator VkPipelineDepthStencilStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo *>( this );
    }

    operator VkPipelineDepthStencilStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineDepthStencilStateCreateInfo *>( this );
    }

    operator VkPipelineDepthStencilStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo *>( this );
    }

    operator VkPipelineDepthStencilStateCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineDepthStencilStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members (including sType/pNext) as a tuple of references for generic comparison.
    std::tuple<StructureType const &,
               const void * const &,
               PipelineDepthStencilStateCreateFlags const &,
               Bool32 const &,
               Bool32 const &,
               CompareOp const &,
               Bool32 const &,
               Bool32 const &,
               StencilOpState const &,
               StencilOpState const &,
               float const &,
               float const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       flags,
                       depthTestEnable,
                       depthWriteEnable,
                       depthCompareOp,
                       depthBoundsTestEnable,
                       stencilTestEnable,
                       front,
                       back,
                       minDepthBounds,
                       maxDepthBounds );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineDepthStencilStateCreateInfo const & ) const = default;
#else
    // Member-wise equality; note that pNext is compared as a raw pointer, not deeply.
    bool operator==( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( depthTestEnable == rhs.depthTestEnable ) &&
             ( depthWriteEnable == rhs.depthWriteEnable ) && ( depthCompareOp == rhs.depthCompareOp ) &&
             ( depthBoundsTestEnable == rhs.depthBoundsTestEnable ) && ( stencilTestEnable == rhs.stencilTestEnable ) && ( front == rhs.front ) &&
             ( back == rhs.back ) && ( minDepthBounds == rhs.minDepthBounds ) && ( maxDepthBounds == rhs.maxDepthBounds );
#  endif
    }

    bool operator!=( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                        sType                 = StructureType::ePipelineDepthStencilStateCreateInfo;
    const void *                         pNext                 = {};
    PipelineDepthStencilStateCreateFlags flags                 = {};
    Bool32                               depthTestEnable       = {};
    Bool32                               depthWriteEnable      = {};
    CompareOp                            depthCompareOp        = CompareOp::eNever;
    Bool32                               depthBoundsTestEnable = {};
    Bool32                               stencilTestEnable     = {};
    StencilOpState                       front                 = {};
    StencilOpState                       back                  = {};
    float                                minDepthBounds        = {};
    float                                maxDepthBounds        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkPipelineDepthStencilStateCreateInfo to its C++ wrapper (only provided for C++20 and newer).
  template <>
  struct CppType<VkPipelineDepthStencilStateCreateInfo>
  {
    using Type = PipelineDepthStencilStateCreateInfo;
  };
#endif

  // Maps StructureType::ePipelineDepthStencilStateCreateInfo back to the wrapper struct that carries that sType.
  template <>
  struct CppType<StructureType, StructureType::ePipelineDepthStencilStateCreateInfo>
  {
    using Type = PipelineDepthStencilStateCreateInfo;
  };

  // wrapper struct for struct VkPipelineColorBlendAttachmentState, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineColorBlendAttachmentState.html
  // Layout-compatible with the native VkPipelineColorBlendAttachmentState, which is what allows
  // the reinterpret_cast based constructors, assignment and conversion operators below.
  struct PipelineColorBlendAttachmentState
  {
    using NativeType = VkPipelineColorBlendAttachmentState;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; defaults are zero blend factors, additive blend ops, blending off.
    VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( Bool32              blendEnable_         = {},
                                                            BlendFactor         srcColorBlendFactor_ = BlendFactor::eZero,
                                                            BlendFactor         dstColorBlendFactor_ = BlendFactor::eZero,
                                                            BlendOp             colorBlendOp_        = BlendOp::eAdd,
                                                            BlendFactor         srcAlphaBlendFactor_ = BlendFactor::eZero,
                                                            BlendFactor         dstAlphaBlendFactor_ = BlendFactor::eZero,
                                                            BlendOp             alphaBlendOp_        = BlendOp::eAdd,
                                                            ColorComponentFlags colorWriteMask_      = {} ) VULKAN_HPP_NOEXCEPT
      : blendEnable( blendEnable_ )
      , srcColorBlendFactor( srcColorBlendFactor_ )
      , dstColorBlendFactor( dstColorBlendFactor_ )
      , colorBlendOp( colorBlendOp_ )
      , srcAlphaBlendFactor( srcAlphaBlendFactor_ )
      , dstAlphaBlendFactor( dstAlphaBlendFactor_ )
      , alphaBlendOp( alphaBlendOp_ )
      , colorWriteMask( colorWriteMask_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpreting it as the layout-identical wrapper.
    PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineColorBlendAttachmentState( *reinterpret_cast<PipelineColorBlendAttachmentState const *>( &rhs ) )
    {
    }

    PipelineColorBlendAttachmentState & operator=( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-identical, see above).
    PipelineColorBlendAttachmentState & operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineColorBlendAttachmentState const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable per-member setters.
    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setBlendEnable( Bool32 blendEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      this->blendEnable = blendEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setSrcColorBlendFactor( BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      this->srcColorBlendFactor = srcColorBlendFactor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setDstColorBlendFactor( BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      this->dstColorBlendFactor = dstColorBlendFactor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setColorBlendOp( BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT
    {
      this->colorBlendOp = colorBlendOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setSrcAlphaBlendFactor( BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      this->srcAlphaBlendFactor = srcAlphaBlendFactor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setDstAlphaBlendFactor( BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      this->dstAlphaBlendFactor = dstAlphaBlendFactor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setAlphaBlendOp( BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT
    {
      this->alphaBlendOp = alphaBlendOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setColorWriteMask( ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT
    {
      this->colorWriteMask = colorWriteMask_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C struct (pointer and reference forms).
    operator VkPipelineColorBlendAttachmentState const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineColorBlendAttachmentState *>( this );
    }

    operator VkPipelineColorBlendAttachmentState *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineColorBlendAttachmentState *>( this );
    }

    operator VkPipelineColorBlendAttachmentState const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineColorBlendAttachmentState *>( this );
    }

    operator VkPipelineColorBlendAttachmentState &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineColorBlendAttachmentState *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references, used for comparisons and generic iteration.
    std::tuple<Bool32 const &,
               BlendFactor const &,
               BlendFactor const &,
               BlendOp const &,
               BlendFactor const &,
               BlendFactor const &,
               BlendOp const &,
               ColorComponentFlags const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(
        blendEnable, srcColorBlendFactor, dstColorBlendFactor, colorBlendOp, srcAlphaBlendFactor, dstAlphaBlendFactor, alphaBlendOp, colorWriteMask );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineColorBlendAttachmentState const & ) const = default;
#else
    // Member-wise equality; all operands are side-effect free, so ordering is irrelevant.
    bool operator==( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( colorWriteMask == rhs.colorWriteMask ) && ( alphaBlendOp == rhs.alphaBlendOp ) && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor ) &&
             ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor ) && ( colorBlendOp == rhs.colorBlendOp ) &&
             ( dstColorBlendFactor == rhs.dstColorBlendFactor ) && ( srcColorBlendFactor == rhs.srcColorBlendFactor ) && ( blendEnable == rhs.blendEnable );
#  endif
    }

    bool operator!=( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    Bool32              blendEnable         = {};
    BlendFactor         srcColorBlendFactor = BlendFactor::eZero;
    BlendFactor         dstColorBlendFactor = BlendFactor::eZero;
    BlendOp             colorBlendOp        = BlendOp::eAdd;
    BlendFactor         srcAlphaBlendFactor = BlendFactor::eZero;
    BlendFactor         dstAlphaBlendFactor = BlendFactor::eZero;
    BlendOp             alphaBlendOp        = BlendOp::eAdd;
    ColorComponentFlags colorWriteMask      = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkPipelineColorBlendAttachmentState to its C++ wrapper (only provided for C++20 and newer).
  template <>
  struct CppType<VkPipelineColorBlendAttachmentState>
  {
    using Type = PipelineColorBlendAttachmentState;
  };
#endif

  // wrapper struct for struct VkPipelineColorBlendStateCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineColorBlendStateCreateInfo.html
  // Layout-compatible with the native C struct, which enables the reinterpret_cast based
  // constructor, assignment and conversion operators below.
  struct PipelineColorBlendStateCreateInfo
  {
    using NativeType = VkPipelineColorBlendStateCreateInfo;

    // This struct may appear at most once in a pNext chain; structureType identifies it for sType-based chaining.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineColorBlendStateCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; only VULKAN_HPP_CONSTEXPR_14 because blendConstants is an array copy.
    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateFlags        flags_           = {},
                                                               Bool32                                    logicOpEnable_   = {},
                                                               LogicOp                                   logicOp_         = LogicOp::eClear,
                                                               uint32_t                                  attachmentCount_ = {},
                                                               const PipelineColorBlendAttachmentState * pAttachments_    = {},
                                                               std::array<float, 4> const &              blendConstants_  = {},
                                                               const void *                              pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , logicOpEnable{ logicOpEnable_ }
      , logicOp{ logicOp_ }
      , attachmentCount{ attachmentCount_ }
      , pAttachments{ pAttachments_ }
      , blendConstants{ blendConstants_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpreting it as the layout-identical wrapper.
    PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineColorBlendStateCreateInfo( *reinterpret_cast<PipelineColorBlendStateCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives attachmentCount/pAttachments from an array proxy.
    // NOTE: only the pointer and size are stored; the caller must keep attachments_ alive while this struct is in use.
    PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateFlags                                       flags_,
                                       Bool32                                                                   logicOpEnable_,
                                       LogicOp                                                                  logicOp_,
                                       ArrayProxyNoTemporaries<const PipelineColorBlendAttachmentState> const & attachments_,
                                       std::array<float, 4> const &                                             blendConstants_ = {},
                                       const void *                                                             pNext_          = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , logicOpEnable( logicOpEnable_ )
      , logicOp( logicOp_ )
      , attachmentCount( static_cast<uint32_t>( attachments_.size() ) )
      , pAttachments( attachments_.data() )
      , blendConstants( blendConstants_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineColorBlendStateCreateInfo & operator=( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-identical, see above).
    PipelineColorBlendStateCreateInfo & operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineColorBlendStateCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable per-member setters (sType is intentionally not settable).
    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setFlags( PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setLogicOpEnable( Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      logicOpEnable = logicOpEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setLogicOp( LogicOp logicOp_ ) VULKAN_HPP_NOEXCEPT
    {
      logicOp = logicOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = attachmentCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setPAttachments( const PipelineColorBlendAttachmentState * pAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pAttachments = pAttachments_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets attachmentCount and pAttachments together from an array proxy.
    PipelineColorBlendStateCreateInfo &
      setAttachments( ArrayProxyNoTemporaries<const PipelineColorBlendAttachmentState> const & attachments_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = static_cast<uint32_t>( attachments_.size() );
      pAttachments    = attachments_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setBlendConstants( std::array<float, 4> blendConstants_ ) VULKAN_HPP_NOEXCEPT
    {
      blendConstants = blendConstants_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C struct (reference and pointer forms).
    operator VkPipelineColorBlendStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineColorBlendStateCreateInfo *>( this );
    }

    operator VkPipelineColorBlendStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineColorBlendStateCreateInfo *>( this );
    }

    operator VkPipelineColorBlendStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineColorBlendStateCreateInfo *>( this );
    }

    operator VkPipelineColorBlendStateCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineColorBlendStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members (including sType/pNext) as a tuple of references for generic comparison.
    std::tuple<StructureType const &,
               const void * const &,
               PipelineColorBlendStateCreateFlags const &,
               Bool32 const &,
               LogicOp const &,
               uint32_t const &,
               const PipelineColorBlendAttachmentState * const &,
               ArrayWrapper1D<float, 4> const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, logicOpEnable, logicOp, attachmentCount, pAttachments, blendConstants );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineColorBlendStateCreateInfo const & ) const = default;
#else
    // Member-wise equality; pNext and pAttachments are compared as raw pointers, not deeply.
    bool operator==( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( logicOpEnable == rhs.logicOpEnable ) &&
             ( logicOp == rhs.logicOp ) && ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) &&
             ( blendConstants == rhs.blendConstants );
#  endif
    }

    bool operator!=( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                             sType           = StructureType::ePipelineColorBlendStateCreateInfo;
    const void *                              pNext           = {};
    PipelineColorBlendStateCreateFlags        flags           = {};
    Bool32                                    logicOpEnable   = {};
    LogicOp                                   logicOp         = LogicOp::eClear;
    uint32_t                                  attachmentCount = {};
    const PipelineColorBlendAttachmentState * pAttachments    = {};
    ArrayWrapper1D<float, 4>                  blendConstants  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkPipelineColorBlendStateCreateInfo to its C++ wrapper (only provided for C++20 and newer).
  template <>
  struct CppType<VkPipelineColorBlendStateCreateInfo>
  {
    using Type = PipelineColorBlendStateCreateInfo;
  };
#endif

  // Maps StructureType::ePipelineColorBlendStateCreateInfo back to the wrapper struct that carries that sType.
  template <>
  struct CppType<StructureType, StructureType::ePipelineColorBlendStateCreateInfo>
  {
    using Type = PipelineColorBlendStateCreateInfo;
  };

  // wrapper struct for struct VkPipelineDynamicStateCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineDynamicStateCreateInfo.html
  struct PipelineDynamicStateCreateInfo
  {
    // The C struct this wrapper mirrors; the two types are layout-compatible,
    // which is what makes the reinterpret_casts below well-defined in practice.
    using NativeType = VkPipelineDynamicStateCreateInfo;

    // This structure type may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The sType value identifying this struct in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineDynamicStateCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its default member initializer.
    VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateFlags flags_             = {},
                                                         uint32_t                        dynamicStateCount_ = {},
                                                         const DynamicState *            pDynamicStates_    = {},
                                                         const void *                    pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , dynamicStateCount{ dynamicStateCount_ }
      , pDynamicStates{ pDynamicStates_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct (layout-compatible, see NativeType).
    PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineDynamicStateCreateInfo( *reinterpret_cast<PipelineDynamicStateCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives dynamicStateCount and pDynamicStates from the
    // array proxy; the caller must keep the referenced array alive while this struct is used.
    PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateFlags                     flags_,
                                    ArrayProxyNoTemporaries<const DynamicState> const & dynamicStates_,
                                    const void *                                        pNext_ = nullptr )
      : pNext( pNext_ ), flags( flags_ ), dynamicStateCount( static_cast<uint32_t>( dynamicStates_.size() ) ), pDynamicStates( dynamicStates_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineDynamicStateCreateInfo & operator=( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible, see NativeType).
    PipelineDynamicStateCreateInfo & operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineDynamicStateCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setFlags( PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setDynamicStateCount( uint32_t dynamicStateCount_ ) VULKAN_HPP_NOEXCEPT
    {
      dynamicStateCount = dynamicStateCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setPDynamicStates( const DynamicState * pDynamicStates_ ) VULKAN_HPP_NOEXCEPT
    {
      pDynamicStates = pDynamicStates_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both dynamicStateCount and pDynamicStates from one array proxy;
    // the referenced array must outlive any use of this struct.
    PipelineDynamicStateCreateInfo & setDynamicStates( ArrayProxyNoTemporaries<const DynamicState> const & dynamicStates_ ) VULKAN_HPP_NOEXCEPT
    {
      dynamicStateCount = static_cast<uint32_t>( dynamicStates_.size() );
      pDynamicStates    = dynamicStates_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the underlying C type, so this wrapper can be
    // handed directly to C Vulkan entry points (by reference or by pointer).
    operator VkPipelineDynamicStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineDynamicStateCreateInfo *>( this );
    }

    operator VkPipelineDynamicStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineDynamicStateCreateInfo *>( this );
    }

    operator VkPipelineDynamicStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineDynamicStateCreateInfo *>( this );
    }

    operator VkPipelineDynamicStateCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineDynamicStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, PipelineDynamicStateCreateFlags const &, uint32_t const &, const DynamicState * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, dynamicStateCount, pDynamicStates );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineDynamicStateCreateInfo const & ) const = default;
#else
    // Member-wise equality; note that pNext and pDynamicStates are compared as
    // pointer values, not by the data they point to.
    bool operator==( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dynamicStateCount == rhs.dynamicStateCount ) &&
             ( pDynamicStates == rhs.pDynamicStates );
#  endif
    }

    bool operator!=( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    // Public data members, matching VkPipelineDynamicStateCreateInfo member-for-member.
  public:
    StructureType                   sType             = StructureType::ePipelineDynamicStateCreateInfo;
    const void *                    pNext             = {};
    PipelineDynamicStateCreateFlags flags             = {};
    uint32_t                        dynamicStateCount = {};
    const DynamicState *            pDynamicStates    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkPipelineDynamicStateCreateInfo>
  {
    using Type = PipelineDynamicStateCreateInfo;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePipelineDynamicStateCreateInfo>
  {
    using Type = PipelineDynamicStateCreateInfo;
  };

  // wrapper struct for struct VkGraphicsPipelineCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGraphicsPipelineCreateInfo.html
  struct GraphicsPipelineCreateInfo
  {
    // The C struct this wrapper mirrors; the two types are layout-compatible,
    // which is what makes the reinterpret_casts below well-defined in practice.
    using NativeType = VkGraphicsPipelineCreateInfo;

    // This structure type may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The sType value identifying this struct in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eGraphicsPipelineCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its default member initializer.
    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( PipelineCreateFlags                          flags_               = {},
                                                        uint32_t                                     stageCount_          = {},
                                                        const PipelineShaderStageCreateInfo *        pStages_             = {},
                                                        const PipelineVertexInputStateCreateInfo *   pVertexInputState_   = {},
                                                        const PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {},
                                                        const PipelineTessellationStateCreateInfo *  pTessellationState_  = {},
                                                        const PipelineViewportStateCreateInfo *      pViewportState_      = {},
                                                        const PipelineRasterizationStateCreateInfo * pRasterizationState_ = {},
                                                        const PipelineMultisampleStateCreateInfo *   pMultisampleState_   = {},
                                                        const PipelineDepthStencilStateCreateInfo *  pDepthStencilState_  = {},
                                                        const PipelineColorBlendStateCreateInfo *    pColorBlendState_    = {},
                                                        const PipelineDynamicStateCreateInfo *       pDynamicState_       = {},
                                                        PipelineLayout                               layout_              = {},
                                                        RenderPass                                   renderPass_          = {},
                                                        uint32_t                                     subpass_             = {},
                                                        Pipeline                                     basePipelineHandle_  = {},
                                                        int32_t                                      basePipelineIndex_   = {},
                                                        const void *                                 pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , stageCount{ stageCount_ }
      , pStages{ pStages_ }
      , pVertexInputState{ pVertexInputState_ }
      , pInputAssemblyState{ pInputAssemblyState_ }
      , pTessellationState{ pTessellationState_ }
      , pViewportState{ pViewportState_ }
      , pRasterizationState{ pRasterizationState_ }
      , pMultisampleState{ pMultisampleState_ }
      , pDepthStencilState{ pDepthStencilState_ }
      , pColorBlendState{ pColorBlendState_ }
      , pDynamicState{ pDynamicState_ }
      , layout{ layout_ }
      , renderPass{ renderPass_ }
      , subpass{ subpass_ }
      , basePipelineHandle{ basePipelineHandle_ }
      , basePipelineIndex{ basePipelineIndex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct (layout-compatible, see NativeType).
    GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : GraphicsPipelineCreateInfo( *reinterpret_cast<GraphicsPipelineCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives stageCount and pStages from the array proxy;
    // the caller must keep the referenced stage array alive while this struct is used.
    GraphicsPipelineCreateInfo( PipelineCreateFlags                                                  flags_,
                                ArrayProxyNoTemporaries<const PipelineShaderStageCreateInfo> const & stages_,
                                const PipelineVertexInputStateCreateInfo *                           pVertexInputState_   = {},
                                const PipelineInputAssemblyStateCreateInfo *                         pInputAssemblyState_ = {},
                                const PipelineTessellationStateCreateInfo *                          pTessellationState_  = {},
                                const PipelineViewportStateCreateInfo *                              pViewportState_      = {},
                                const PipelineRasterizationStateCreateInfo *                         pRasterizationState_ = {},
                                const PipelineMultisampleStateCreateInfo *                           pMultisampleState_   = {},
                                const PipelineDepthStencilStateCreateInfo *                          pDepthStencilState_  = {},
                                const PipelineColorBlendStateCreateInfo *                            pColorBlendState_    = {},
                                const PipelineDynamicStateCreateInfo *                               pDynamicState_       = {},
                                PipelineLayout                                                       layout_              = {},
                                RenderPass                                                           renderPass_          = {},
                                uint32_t                                                             subpass_             = {},
                                Pipeline                                                             basePipelineHandle_  = {},
                                int32_t                                                              basePipelineIndex_   = {},
                                const void *                                                         pNext_               = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , stageCount( static_cast<uint32_t>( stages_.size() ) )
      , pStages( stages_.data() )
      , pVertexInputState( pVertexInputState_ )
      , pInputAssemblyState( pInputAssemblyState_ )
      , pTessellationState( pTessellationState_ )
      , pViewportState( pViewportState_ )
      , pRasterizationState( pRasterizationState_ )
      , pMultisampleState( pMultisampleState_ )
      , pDepthStencilState( pDepthStencilState_ )
      , pColorBlendState( pColorBlendState_ )
      , pDynamicState( pDynamicState_ )
      , layout( layout_ )
      , renderPass( renderPass_ )
      , subpass( subpass_ )
      , basePipelineHandle( basePipelineHandle_ )
      , basePipelineIndex( basePipelineIndex_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    GraphicsPipelineCreateInfo & operator=( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible, see NativeType).
    GraphicsPipelineCreateInfo & operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<GraphicsPipelineCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setFlags( PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
    {
      stageCount = stageCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPStages( const PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
    {
      pStages = pStages_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both stageCount and pStages from one array proxy;
    // the referenced array must outlive any use of this struct.
    GraphicsPipelineCreateInfo & setStages( ArrayProxyNoTemporaries<const PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
    {
      stageCount = static_cast<uint32_t>( stages_.size() );
      pStages    = stages_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
      setPVertexInputState( const PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
    {
      pVertexInputState = pVertexInputState_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
      setPInputAssemblyState( const PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT
    {
      pInputAssemblyState = pInputAssemblyState_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
      setPTessellationState( const PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT
    {
      pTessellationState = pTessellationState_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPViewportState( const PipelineViewportStateCreateInfo * pViewportState_ ) VULKAN_HPP_NOEXCEPT
    {
      pViewportState = pViewportState_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
      setPRasterizationState( const PipelineRasterizationStateCreateInfo * pRasterizationState_ ) VULKAN_HPP_NOEXCEPT
    {
      pRasterizationState = pRasterizationState_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
      setPMultisampleState( const PipelineMultisampleStateCreateInfo * pMultisampleState_ ) VULKAN_HPP_NOEXCEPT
    {
      pMultisampleState = pMultisampleState_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
      setPDepthStencilState( const PipelineDepthStencilStateCreateInfo * pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT
    {
      pDepthStencilState = pDepthStencilState_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPColorBlendState( const PipelineColorBlendStateCreateInfo * pColorBlendState_ ) VULKAN_HPP_NOEXCEPT
    {
      pColorBlendState = pColorBlendState_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPDynamicState( const PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT
    {
      pDynamicState = pDynamicState_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setLayout( PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
    {
      layout = layout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setRenderPass( RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
    {
      renderPass = renderPass_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
    {
      subpass = subpass_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setBasePipelineHandle( Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
    {
      basePipelineHandle = basePipelineHandle_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      basePipelineIndex = basePipelineIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the underlying C type, so this wrapper can be
    // handed directly to C Vulkan entry points (by reference or by pointer).
    operator VkGraphicsPipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( this );
    }

    operator VkGraphicsPipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGraphicsPipelineCreateInfo *>( this );
    }

    operator VkGraphicsPipelineCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( this );
    }

    operator VkGraphicsPipelineCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkGraphicsPipelineCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               PipelineCreateFlags const &,
               uint32_t const &,
               const PipelineShaderStageCreateInfo * const &,
               const PipelineVertexInputStateCreateInfo * const &,
               const PipelineInputAssemblyStateCreateInfo * const &,
               const PipelineTessellationStateCreateInfo * const &,
               const PipelineViewportStateCreateInfo * const &,
               const PipelineRasterizationStateCreateInfo * const &,
               const PipelineMultisampleStateCreateInfo * const &,
               const PipelineDepthStencilStateCreateInfo * const &,
               const PipelineColorBlendStateCreateInfo * const &,
               const PipelineDynamicStateCreateInfo * const &,
               PipelineLayout const &,
               RenderPass const &,
               uint32_t const &,
               Pipeline const &,
               int32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       flags,
                       stageCount,
                       pStages,
                       pVertexInputState,
                       pInputAssemblyState,
                       pTessellationState,
                       pViewportState,
                       pRasterizationState,
                       pMultisampleState,
                       pDepthStencilState,
                       pColorBlendState,
                       pDynamicState,
                       layout,
                       renderPass,
                       subpass,
                       basePipelineHandle,
                       basePipelineIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( GraphicsPipelineCreateInfo const & ) const = default;
#else
    // Member-wise equality; note that all pointer members (pNext, pStages, the
    // p*State pointers) are compared as pointer values, not by pointed-to data.
    bool operator==( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) &&
             ( pVertexInputState == rhs.pVertexInputState ) && ( pInputAssemblyState == rhs.pInputAssemblyState ) &&
             ( pTessellationState == rhs.pTessellationState ) && ( pViewportState == rhs.pViewportState ) &&
             ( pRasterizationState == rhs.pRasterizationState ) && ( pMultisampleState == rhs.pMultisampleState ) &&
             ( pDepthStencilState == rhs.pDepthStencilState ) && ( pColorBlendState == rhs.pColorBlendState ) && ( pDynamicState == rhs.pDynamicState ) &&
             ( layout == rhs.layout ) && ( renderPass == rhs.renderPass ) && ( subpass == rhs.subpass ) && ( basePipelineHandle == rhs.basePipelineHandle ) &&
             ( basePipelineIndex == rhs.basePipelineIndex );
#  endif
    }

    bool operator!=( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    // Public data members, matching VkGraphicsPipelineCreateInfo member-for-member.
  public:
    StructureType                                sType               = StructureType::eGraphicsPipelineCreateInfo;
    const void *                                 pNext               = {};
    PipelineCreateFlags                          flags               = {};
    uint32_t                                     stageCount          = {};
    const PipelineShaderStageCreateInfo *        pStages             = {};
    const PipelineVertexInputStateCreateInfo *   pVertexInputState   = {};
    const PipelineInputAssemblyStateCreateInfo * pInputAssemblyState = {};
    const PipelineTessellationStateCreateInfo *  pTessellationState  = {};
    const PipelineViewportStateCreateInfo *      pViewportState      = {};
    const PipelineRasterizationStateCreateInfo * pRasterizationState = {};
    const PipelineMultisampleStateCreateInfo *   pMultisampleState   = {};
    const PipelineDepthStencilStateCreateInfo *  pDepthStencilState  = {};
    const PipelineColorBlendStateCreateInfo *    pColorBlendState    = {};
    const PipelineDynamicStateCreateInfo *       pDynamicState       = {};
    PipelineLayout                               layout              = {};
    RenderPass                                   renderPass          = {};
    uint32_t                                     subpass             = {};
    Pipeline                                     basePipelineHandle  = {};
    int32_t                                      basePipelineIndex   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkGraphicsPipelineCreateInfo>
  {
    using Type = GraphicsPipelineCreateInfo;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eGraphicsPipelineCreateInfo>
  {
    using Type = GraphicsPipelineCreateInfo;
  };

  // wrapper struct for struct VkGraphicsPipelineLibraryCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkGraphicsPipelineLibraryCreateInfoEXT.html
  struct GraphicsPipelineLibraryCreateInfoEXT
  {
    // The C struct this wrapper mirrors; the two types are layout-compatible,
    // which is what makes the reinterpret_casts below well-defined in practice.
    using NativeType = VkGraphicsPipelineLibraryCreateInfoEXT;

    // This structure type may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The sType value identifying this struct in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eGraphicsPipelineLibraryCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its default member initializer.
    VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryCreateInfoEXT( GraphicsPipelineLibraryFlagsEXT flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryCreateInfoEXT( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct (layout-compatible, see NativeType).
    GraphicsPipelineLibraryCreateInfoEXT( VkGraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : GraphicsPipelineLibraryCreateInfoEXT( *reinterpret_cast<GraphicsPipelineLibraryCreateInfoEXT const *>( &rhs ) )
    {
    }

    GraphicsPipelineLibraryCreateInfoEXT & operator=( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible, see NativeType).
    GraphicsPipelineLibraryCreateInfoEXT & operator=( VkGraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<GraphicsPipelineLibraryCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT & setFlags( GraphicsPipelineLibraryFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the underlying C type, so this wrapper can be
    // handed directly to C Vulkan entry points (by reference or by pointer).
    operator VkGraphicsPipelineLibraryCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGraphicsPipelineLibraryCreateInfoEXT *>( this );
    }

    operator VkGraphicsPipelineLibraryCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGraphicsPipelineLibraryCreateInfoEXT *>( this );
    }

    operator VkGraphicsPipelineLibraryCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkGraphicsPipelineLibraryCreateInfoEXT *>( this );
    }

    operator VkGraphicsPipelineLibraryCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkGraphicsPipelineLibraryCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, GraphicsPipelineLibraryFlagsEXT const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( GraphicsPipelineLibraryCreateInfoEXT const & ) const = default;
#else
    // Member-wise equality; note that pNext is compared as a pointer value.
    bool operator==( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
#  endif
    }

    bool operator!=( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    // Public data members, matching VkGraphicsPipelineLibraryCreateInfoEXT member-for-member.
  public:
    StructureType                   sType = StructureType::eGraphicsPipelineLibraryCreateInfoEXT;
    const void *                    pNext = {};
    GraphicsPipelineLibraryFlagsEXT flags = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkGraphicsPipelineLibraryCreateInfoEXT>
  {
    using Type = GraphicsPipelineLibraryCreateInfoEXT;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eGraphicsPipelineLibraryCreateInfoEXT>
  {
    using Type = GraphicsPipelineLibraryCreateInfoEXT;
  };

  // wrapper struct for struct VkGraphicsShaderGroupCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkGraphicsShaderGroupCreateInfoNV.html
  struct GraphicsShaderGroupCreateInfoNV
  {
    // The C struct this wrapper mirrors; the two types are layout-compatible,
    // which is what makes the reinterpret_casts below well-defined in practice.
    using NativeType = VkGraphicsShaderGroupCreateInfoNV;

    // This structure type may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The sType value identifying this struct in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eGraphicsShaderGroupCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its default member initializer.
    VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( uint32_t                                    stageCount_         = {},
                                                          const PipelineShaderStageCreateInfo *       pStages_            = {},
                                                          const PipelineVertexInputStateCreateInfo *  pVertexInputState_  = {},
                                                          const PipelineTessellationStateCreateInfo * pTessellationState_ = {},
                                                          const void *                                pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stageCount{ stageCount_ }
      , pStages{ pStages_ }
      , pVertexInputState{ pVertexInputState_ }
      , pTessellationState{ pTessellationState_ }
    {
    }

    VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct (layout-compatible, see NativeType).
    GraphicsShaderGroupCreateInfoNV( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : GraphicsShaderGroupCreateInfoNV( *reinterpret_cast<GraphicsShaderGroupCreateInfoNV const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives stageCount and pStages from the array proxy;
    // the caller must keep the referenced stage array alive while this struct is used.
    GraphicsShaderGroupCreateInfoNV( ArrayProxyNoTemporaries<const PipelineShaderStageCreateInfo> const & stages_,
                                     const PipelineVertexInputStateCreateInfo *                           pVertexInputState_  = {},
                                     const PipelineTessellationStateCreateInfo *                          pTessellationState_ = {},
                                     const void *                                                         pNext_              = nullptr )
      : pNext( pNext_ )
      , stageCount( static_cast<uint32_t>( stages_.size() ) )
      , pStages( stages_.data() )
      , pVertexInputState( pVertexInputState_ )
      , pTessellationState( pTessellationState_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    GraphicsShaderGroupCreateInfoNV & operator=( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible, see NativeType).
    GraphicsShaderGroupCreateInfoNV & operator=( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<GraphicsShaderGroupCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
    {
      stageCount = stageCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPStages( const PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
    {
      pStages = pStages_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both stageCount and pStages from one array proxy;
    // the referenced array must outlive any use of this struct.
    GraphicsShaderGroupCreateInfoNV & setStages( ArrayProxyNoTemporaries<const PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
    {
      stageCount = static_cast<uint32_t>( stages_.size() );
      pStages    = stages_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV &
      setPVertexInputState( const PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
    {
      pVertexInputState = pVertexInputState_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV &
      setPTessellationState( const PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT
    {
      pTessellationState = pTessellationState_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the underlying C type, so this wrapper can be
    // handed directly to C Vulkan entry points (by reference or by pointer).
    operator VkGraphicsShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGraphicsShaderGroupCreateInfoNV *>( this );
    }

    operator VkGraphicsShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGraphicsShaderGroupCreateInfoNV *>( this );
    }

    operator VkGraphicsShaderGroupCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkGraphicsShaderGroupCreateInfoNV *>( this );
    }

    operator VkGraphicsShaderGroupCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkGraphicsShaderGroupCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               uint32_t const &,
               const PipelineShaderStageCreateInfo * const &,
               const PipelineVertexInputStateCreateInfo * const &,
               const PipelineTessellationStateCreateInfo * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stageCount, pStages, pVertexInputState, pTessellationState );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( GraphicsShaderGroupCreateInfoNV const & ) const = default;
#else
    // Member-wise equality; note that all pointer members are compared as
    // pointer values, not by the data they point to.
    bool operator==( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) &&
             ( pVertexInputState == rhs.pVertexInputState ) && ( pTessellationState == rhs.pTessellationState );
#  endif
    }

    bool operator!=( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    // Public data members, matching VkGraphicsShaderGroupCreateInfoNV member-for-member.
  public:
    StructureType                               sType              = StructureType::eGraphicsShaderGroupCreateInfoNV;
    const void *                                pNext              = {};
    uint32_t                                    stageCount         = {};
    const PipelineShaderStageCreateInfo *       pStages            = {};
    const PipelineVertexInputStateCreateInfo *  pVertexInputState  = {};
    const PipelineTessellationStateCreateInfo * pTessellationState = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type, so generic code can use CppType<T>::Type.
  template <>
  struct CppType<VkGraphicsShaderGroupCreateInfoNV>
  {
    using Type = GraphicsShaderGroupCreateInfoNV;
  };
#endif

  // Maps the StructureType enumerant back to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eGraphicsShaderGroupCreateInfoNV>
  {
    using Type = GraphicsShaderGroupCreateInfoNV;
  };

  // wrapper struct for struct VkGraphicsPipelineShaderGroupsCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkGraphicsPipelineShaderGroupsCreateInfoNV.html
  struct GraphicsPipelineShaderGroupsCreateInfoNV
  {
    using NativeType = VkGraphicsPipelineShaderGroupsCreateInfoNV;

    // structureType is the sType value this struct carries; allowDuplicate == false means it may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all members default to zero / null.
    VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( uint32_t                                groupCount_    = {},
                                                                   const GraphicsShaderGroupCreateInfoNV * pGroups_       = {},
                                                                   uint32_t                                pipelineCount_ = {},
                                                                   const Pipeline *                        pPipelines_    = {},
                                                                   const void *                            pNext_         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , groupCount{ groupCount_ }
      , pGroups{ pGroups_ }
      , pipelineCount{ pipelineCount_ }
      , pPipelines{ pPipelines_ }
    {
    }

    VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; the reinterpret_cast is valid because the wrapper shares its exact layout.
    GraphicsPipelineShaderGroupsCreateInfoNV( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : GraphicsPipelineShaderGroupsCreateInfoNV( *reinterpret_cast<GraphicsPipelineShaderGroupsCreateInfoNV const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode convenience constructor: derives groupCount / pipelineCount from the array proxies' sizes.
    // Callers must keep the backing arrays alive while this struct is in use (pointers are borrowed, not copied).
    GraphicsPipelineShaderGroupsCreateInfoNV( ArrayProxyNoTemporaries<const GraphicsShaderGroupCreateInfoNV> const & groups_,
                                              ArrayProxyNoTemporaries<const Pipeline> const &                        pipelines_ = {},
                                              const void *                                                           pNext_     = nullptr )
      : pNext( pNext_ )
      , groupCount( static_cast<uint32_t>( groups_.size() ) )
      , pGroups( groups_.data() )
      , pipelineCount( static_cast<uint32_t>( pipelines_.size() ) )
      , pPipelines( pipelines_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    GraphicsPipelineShaderGroupsCreateInfoNV & operator=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct.
    GraphicsPipelineShaderGroupsCreateInfoNV & operator=( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<GraphicsPipelineShaderGroupsCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
    {
      groupCount = groupCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPGroups( const GraphicsShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT
    {
      pGroups = pGroups_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both groupCount and pGroups from a single array proxy.
    GraphicsPipelineShaderGroupsCreateInfoNV & setGroups( ArrayProxyNoTemporaries<const GraphicsShaderGroupCreateInfoNV> const & groups_ ) VULKAN_HPP_NOEXCEPT
    {
      groupCount = static_cast<uint32_t>( groups_.size() );
      pGroups    = groups_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPipelineCount( uint32_t pipelineCount_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineCount = pipelineCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPPipelines( const Pipeline * pPipelines_ ) VULKAN_HPP_NOEXCEPT
    {
      pPipelines = pPipelines_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both pipelineCount and pPipelines from a single array proxy.
    GraphicsPipelineShaderGroupsCreateInfoNV & setPipelines( ArrayProxyNoTemporaries<const Pipeline> const & pipelines_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineCount = static_cast<uint32_t>( pipelines_.size() );
      pPipelines    = pipelines_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkGraphicsPipelineShaderGroupsCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGraphicsPipelineShaderGroupsCreateInfoNV *>( this );
    }

    operator VkGraphicsPipelineShaderGroupsCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGraphicsPipelineShaderGroupsCreateInfoNV *>( this );
    }

    operator VkGraphicsPipelineShaderGroupsCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkGraphicsPipelineShaderGroupsCreateInfoNV *>( this );
    }

    operator VkGraphicsPipelineShaderGroupsCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkGraphicsPipelineShaderGroupsCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, enabling generic member-wise comparison / hashing.
    std::tuple<StructureType const &,
               const void * const &,
               uint32_t const &,
               const GraphicsShaderGroupCreateInfoNV * const &,
               uint32_t const &,
               const Pipeline * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, groupCount, pGroups, pipelineCount, pPipelines );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( GraphicsPipelineShaderGroupsCreateInfoNV const & ) const = default;
#else
    // Member-wise equality fallback when operator<=> is unavailable; pointers compare by address, not pointee.
    bool operator==( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( groupCount == rhs.groupCount ) && ( pGroups == rhs.pGroups ) &&
             ( pipelineCount == rhs.pipelineCount ) && ( pPipelines == rhs.pPipelines );
#  endif
    }

    bool operator!=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkGraphicsPipelineShaderGroupsCreateInfoNV one-to-one, in declaration order.
    StructureType                           sType         = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV;
    const void *                            pNext         = {};
    uint32_t                                groupCount    = {};
    const GraphicsShaderGroupCreateInfoNV * pGroups       = {};
    uint32_t                                pipelineCount = {};
    const Pipeline *                        pPipelines    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type, so generic code can use CppType<T>::Type.
  template <>
  struct CppType<VkGraphicsPipelineShaderGroupsCreateInfoNV>
  {
    using Type = GraphicsPipelineShaderGroupsCreateInfoNV;
  };
#endif

  // Maps the StructureType enumerant back to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV>
  {
    using Type = GraphicsPipelineShaderGroupsCreateInfoNV;
  };

  // wrapper struct for struct VkXYColorEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkXYColorEXT.html
  struct XYColorEXT
  {
    using NativeType = VkXYColorEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; both coordinates default to zero.
    VULKAN_HPP_CONSTEXPR XYColorEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT
      : x( x_ )
      , y( y_ )
    {
    }

    VULKAN_HPP_CONSTEXPR XYColorEXT( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the wrapper shares its exact layout, so a reinterpret is sufficient.
    XYColorEXT( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT : XYColorEXT( *reinterpret_cast<XYColorEXT const *>( &rhs ) ) {}

    XYColorEXT & operator=( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    XYColorEXT & operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<XYColorEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 XYColorEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
    {
      x = x_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 XYColorEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
    {
      y = y_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkXYColorEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkXYColorEXT *>( this );
    }

    operator VkXYColorEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkXYColorEXT *>( this );
    }

    operator VkXYColorEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkXYColorEXT *>( this );
    }

    operator VkXYColorEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkXYColorEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, enabling generic member-wise comparison / hashing.
    std::tuple<float const &, float const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( x, y );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( XYColorEXT const & ) const = default;
#else
    // Member-wise equality fallback when operator<=> is unavailable (exact float comparison).
    bool operator==( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( x == rhs.x ) && ( y == rhs.y );
#  endif
    }

    bool operator!=( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    float x = {};
    float y = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type, so generic code can use CppType<T>::Type.
  template <>
  struct CppType<VkXYColorEXT>
  {
    using Type = XYColorEXT;
  };
#endif

  // wrapper struct for struct VkHdrMetadataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkHdrMetadataEXT.html
  struct HdrMetadataEXT
  {
    using NativeType = VkHdrMetadataEXT;

    // structureType is the sType value this struct carries; allowDuplicate == false means it may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eHdrMetadataEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all members default to zero / null.
    // NOTE(review): the units of the luminance / light-level fields are defined by the VK_EXT_hdr_metadata spec
    // (see the registry link above) and are not visible here — consult the spec before interpreting the values.
    VULKAN_HPP_CONSTEXPR HdrMetadataEXT( XYColorEXT   displayPrimaryRed_         = {},
                                         XYColorEXT   displayPrimaryGreen_       = {},
                                         XYColorEXT   displayPrimaryBlue_        = {},
                                         XYColorEXT   whitePoint_                = {},
                                         float        maxLuminance_              = {},
                                         float        minLuminance_              = {},
                                         float        maxContentLightLevel_      = {},
                                         float        maxFrameAverageLightLevel_ = {},
                                         const void * pNext_                     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , displayPrimaryRed{ displayPrimaryRed_ }
      , displayPrimaryGreen{ displayPrimaryGreen_ }
      , displayPrimaryBlue{ displayPrimaryBlue_ }
      , whitePoint{ whitePoint_ }
      , maxLuminance{ maxLuminance_ }
      , minLuminance{ minLuminance_ }
      , maxContentLightLevel{ maxContentLightLevel_ }
      , maxFrameAverageLightLevel{ maxFrameAverageLightLevel_ }
    {
    }

    VULKAN_HPP_CONSTEXPR HdrMetadataEXT( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; the reinterpret_cast is valid because the wrapper shares its exact layout.
    HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT : HdrMetadataEXT( *reinterpret_cast<HdrMetadataEXT const *>( &rhs ) ) {}

    HdrMetadataEXT & operator=( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct.
    HdrMetadataEXT & operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<HdrMetadataEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryRed( XYColorEXT const & displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT
    {
      displayPrimaryRed = displayPrimaryRed_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryGreen( XYColorEXT const & displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT
    {
      displayPrimaryGreen = displayPrimaryGreen_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryBlue( XYColorEXT const & displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT
    {
      displayPrimaryBlue = displayPrimaryBlue_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setWhitePoint( XYColorEXT const & whitePoint_ ) VULKAN_HPP_NOEXCEPT
    {
      whitePoint = whitePoint_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxLuminance( float maxLuminance_ ) VULKAN_HPP_NOEXCEPT
    {
      maxLuminance = maxLuminance_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMinLuminance( float minLuminance_ ) VULKAN_HPP_NOEXCEPT
    {
      minLuminance = minLuminance_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxContentLightLevel( float maxContentLightLevel_ ) VULKAN_HPP_NOEXCEPT
    {
      maxContentLightLevel = maxContentLightLevel_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) VULKAN_HPP_NOEXCEPT
    {
      maxFrameAverageLightLevel = maxFrameAverageLightLevel_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkHdrMetadataEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkHdrMetadataEXT *>( this );
    }

    operator VkHdrMetadataEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkHdrMetadataEXT *>( this );
    }

    operator VkHdrMetadataEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkHdrMetadataEXT *>( this );
    }

    operator VkHdrMetadataEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkHdrMetadataEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, enabling generic member-wise comparison / hashing.
    std::tuple<StructureType const &,
               const void * const &,
               XYColorEXT const &,
               XYColorEXT const &,
               XYColorEXT const &,
               XYColorEXT const &,
               float const &,
               float const &,
               float const &,
               float const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       displayPrimaryRed,
                       displayPrimaryGreen,
                       displayPrimaryBlue,
                       whitePoint,
                       maxLuminance,
                       minLuminance,
                       maxContentLightLevel,
                       maxFrameAverageLightLevel );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( HdrMetadataEXT const & ) const = default;
#else
    // Member-wise equality fallback when operator<=> is unavailable (exact float comparison).
    bool operator==( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayPrimaryRed == rhs.displayPrimaryRed ) &&
             ( displayPrimaryGreen == rhs.displayPrimaryGreen ) && ( displayPrimaryBlue == rhs.displayPrimaryBlue ) && ( whitePoint == rhs.whitePoint ) &&
             ( maxLuminance == rhs.maxLuminance ) && ( minLuminance == rhs.minLuminance ) && ( maxContentLightLevel == rhs.maxContentLightLevel ) &&
             ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel );
#  endif
    }

    bool operator!=( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkHdrMetadataEXT one-to-one, in declaration order.
    StructureType sType                     = StructureType::eHdrMetadataEXT;
    const void *  pNext                     = {};
    XYColorEXT    displayPrimaryRed         = {};
    XYColorEXT    displayPrimaryGreen       = {};
    XYColorEXT    displayPrimaryBlue        = {};
    XYColorEXT    whitePoint                = {};
    float         maxLuminance              = {};
    float         minLuminance              = {};
    float         maxContentLightLevel      = {};
    float         maxFrameAverageLightLevel = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type, so generic code can use CppType<T>::Type.
  template <>
  struct CppType<VkHdrMetadataEXT>
  {
    using Type = HdrMetadataEXT;
  };
#endif

  // Maps the StructureType enumerant back to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eHdrMetadataEXT>
  {
    using Type = HdrMetadataEXT;
  };

  // wrapper struct for struct VkHdrVividDynamicMetadataHUAWEI, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkHdrVividDynamicMetadataHUAWEI.html
  struct HdrVividDynamicMetadataHUAWEI
  {
    using NativeType = VkHdrVividDynamicMetadataHUAWEI;

    // structureType is the sType value this struct carries; allowDuplicate == false means it may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eHdrVividDynamicMetadataHUAWEI;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; dynamicMetadataSize is in bytes and pDynamicMetadata is a borrowed, opaque blob.
    VULKAN_HPP_CONSTEXPR
      HdrVividDynamicMetadataHUAWEI( size_t dynamicMetadataSize_ = {}, const void * pDynamicMetadata_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , dynamicMetadataSize{ dynamicMetadataSize_ }
      , pDynamicMetadata{ pDynamicMetadata_ }
    {
    }

    VULKAN_HPP_CONSTEXPR HdrVividDynamicMetadataHUAWEI( HdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; the reinterpret_cast is valid because the wrapper shares its exact layout.
    HdrVividDynamicMetadataHUAWEI( VkHdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
      : HdrVividDynamicMetadataHUAWEI( *reinterpret_cast<HdrVividDynamicMetadataHUAWEI const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives the byte size as element count * sizeof( T ) from the array proxy.
    template <typename T>
    HdrVividDynamicMetadataHUAWEI( ArrayProxyNoTemporaries<const T> const & dynamicMetadata_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), dynamicMetadataSize( dynamicMetadata_.size() * sizeof( T ) ), pDynamicMetadata( dynamicMetadata_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    HdrVividDynamicMetadataHUAWEI & operator=( HdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct.
    HdrVividDynamicMetadataHUAWEI & operator=( VkHdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<HdrVividDynamicMetadataHUAWEI const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 HdrVividDynamicMetadataHUAWEI & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 HdrVividDynamicMetadataHUAWEI & setDynamicMetadataSize( size_t dynamicMetadataSize_ ) VULKAN_HPP_NOEXCEPT
    {
      dynamicMetadataSize = dynamicMetadataSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 HdrVividDynamicMetadataHUAWEI & setPDynamicMetadata( const void * pDynamicMetadata_ ) VULKAN_HPP_NOEXCEPT
    {
      pDynamicMetadata = pDynamicMetadata_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both the byte size (element count * sizeof( T )) and the pointer from a single array proxy.
    template <typename T>
    HdrVividDynamicMetadataHUAWEI & setDynamicMetadata( ArrayProxyNoTemporaries<const T> const & dynamicMetadata_ ) VULKAN_HPP_NOEXCEPT
    {
      dynamicMetadataSize = dynamicMetadata_.size() * sizeof( T );
      pDynamicMetadata    = dynamicMetadata_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkHdrVividDynamicMetadataHUAWEI const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkHdrVividDynamicMetadataHUAWEI *>( this );
    }

    operator VkHdrVividDynamicMetadataHUAWEI &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkHdrVividDynamicMetadataHUAWEI *>( this );
    }

    operator VkHdrVividDynamicMetadataHUAWEI const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkHdrVividDynamicMetadataHUAWEI *>( this );
    }

    operator VkHdrVividDynamicMetadataHUAWEI *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkHdrVividDynamicMetadataHUAWEI *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, enabling generic member-wise comparison / hashing.
    std::tuple<StructureType const &, const void * const &, size_t const &, const void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, dynamicMetadataSize, pDynamicMetadata );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( HdrVividDynamicMetadataHUAWEI const & ) const = default;
#else
    // Member-wise equality fallback when operator<=> is unavailable; pDynamicMetadata compares by address, not contents.
    bool operator==( HdrVividDynamicMetadataHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicMetadataSize == rhs.dynamicMetadataSize ) &&
             ( pDynamicMetadata == rhs.pDynamicMetadata );
#  endif
    }

    bool operator!=( HdrVividDynamicMetadataHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkHdrVividDynamicMetadataHUAWEI one-to-one, in declaration order.
    StructureType sType               = StructureType::eHdrVividDynamicMetadataHUAWEI;
    const void *  pNext               = {};
    size_t        dynamicMetadataSize = {};
    const void *  pDynamicMetadata    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type, so generic code can use CppType<T>::Type.
  template <>
  struct CppType<VkHdrVividDynamicMetadataHUAWEI>
  {
    using Type = HdrVividDynamicMetadataHUAWEI;
  };
#endif

  // Maps the StructureType enumerant back to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eHdrVividDynamicMetadataHUAWEI>
  {
    using Type = HdrVividDynamicMetadataHUAWEI;
  };

  // wrapper struct for struct VkHeadlessSurfaceCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkHeadlessSurfaceCreateInfoEXT.html
  struct HeadlessSurfaceCreateInfoEXT
  {
    using NativeType = VkHeadlessSurfaceCreateInfoEXT;

    // structureType is the sType value this struct carries; allowDuplicate == false means it may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eHeadlessSurfaceCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; flags defaults to empty.
    VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( HeadlessSurfaceCreateFlagsEXT flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; the reinterpret_cast is valid because the wrapper shares its exact layout.
    HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : HeadlessSurfaceCreateInfoEXT( *reinterpret_cast<HeadlessSurfaceCreateInfoEXT const *>( &rhs ) )
    {
    }

    HeadlessSurfaceCreateInfoEXT & operator=( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct.
    HeadlessSurfaceCreateInfoEXT & operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<HeadlessSurfaceCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT & setFlags( HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkHeadlessSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( this );
    }

    operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkHeadlessSurfaceCreateInfoEXT *>( this );
    }

    operator VkHeadlessSurfaceCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( this );
    }

    operator VkHeadlessSurfaceCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkHeadlessSurfaceCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, enabling generic member-wise comparison / hashing.
    std::tuple<StructureType const &, const void * const &, HeadlessSurfaceCreateFlagsEXT const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( HeadlessSurfaceCreateInfoEXT const & ) const = default;
#else
    // Member-wise equality fallback when operator<=> is unavailable.
    bool operator==( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
#  endif
    }

    bool operator!=( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkHeadlessSurfaceCreateInfoEXT one-to-one, in declaration order.
    StructureType                 sType = StructureType::eHeadlessSurfaceCreateInfoEXT;
    const void *                  pNext = {};
    HeadlessSurfaceCreateFlagsEXT flags = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type, so generic code can use CppType<T>::Type.
  template <>
  struct CppType<VkHeadlessSurfaceCreateInfoEXT>
  {
    using Type = HeadlessSurfaceCreateInfoEXT;
  };
#endif

  // Maps the StructureType enumerant back to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eHeadlessSurfaceCreateInfoEXT>
  {
    using Type = HeadlessSurfaceCreateInfoEXT;
  };

  // wrapper struct for struct VkHostImageCopyDevicePerformanceQuery, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkHostImageCopyDevicePerformanceQuery.html
  struct HostImageCopyDevicePerformanceQuery
  {
    using NativeType = VkHostImageCopyDevicePerformanceQuery;

    // structureType is the sType value this struct carries; allowDuplicate == false means it may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eHostImageCopyDevicePerformanceQuery;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor. Note the non-const pNext and the absence of generated setters below,
    // which suggests this struct is filled in by the implementation rather than by the application
    // — NOTE(review): confirm against the registry page linked above.
    VULKAN_HPP_CONSTEXPR
      HostImageCopyDevicePerformanceQuery( Bool32 optimalDeviceAccess_ = {}, Bool32 identicalMemoryLayout_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , optimalDeviceAccess{ optimalDeviceAccess_ }
      , identicalMemoryLayout{ identicalMemoryLayout_ }
    {
    }

    VULKAN_HPP_CONSTEXPR HostImageCopyDevicePerformanceQuery( HostImageCopyDevicePerformanceQuery const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; the reinterpret_cast is valid because the wrapper shares its exact layout.
    HostImageCopyDevicePerformanceQuery( VkHostImageCopyDevicePerformanceQuery const & rhs ) VULKAN_HPP_NOEXCEPT
      : HostImageCopyDevicePerformanceQuery( *reinterpret_cast<HostImageCopyDevicePerformanceQuery const *>( &rhs ) )
    {
    }

    HostImageCopyDevicePerformanceQuery & operator=( HostImageCopyDevicePerformanceQuery const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct.
    HostImageCopyDevicePerformanceQuery & operator=( VkHostImageCopyDevicePerformanceQuery const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<HostImageCopyDevicePerformanceQuery const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkHostImageCopyDevicePerformanceQuery const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkHostImageCopyDevicePerformanceQuery *>( this );
    }

    operator VkHostImageCopyDevicePerformanceQuery &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkHostImageCopyDevicePerformanceQuery *>( this );
    }

    operator VkHostImageCopyDevicePerformanceQuery const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkHostImageCopyDevicePerformanceQuery *>( this );
    }

    operator VkHostImageCopyDevicePerformanceQuery *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkHostImageCopyDevicePerformanceQuery *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, enabling generic member-wise comparison / hashing.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, optimalDeviceAccess, identicalMemoryLayout );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( HostImageCopyDevicePerformanceQuery const & ) const = default;
#else
    // Member-wise equality fallback when operator<=> is unavailable.
    bool operator==( HostImageCopyDevicePerformanceQuery const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( optimalDeviceAccess == rhs.optimalDeviceAccess ) &&
             ( identicalMemoryLayout == rhs.identicalMemoryLayout );
#  endif
    }

    bool operator!=( HostImageCopyDevicePerformanceQuery const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkHostImageCopyDevicePerformanceQuery one-to-one, in declaration order.
    StructureType sType                 = StructureType::eHostImageCopyDevicePerformanceQuery;
    void *        pNext                 = {};
    Bool32        optimalDeviceAccess   = {};
    Bool32        identicalMemoryLayout = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type, so generic code can use CppType<T>::Type.
  template <>
  struct CppType<VkHostImageCopyDevicePerformanceQuery>
  {
    using Type = HostImageCopyDevicePerformanceQuery;
  };
#endif

  // Maps the StructureType enumerant back to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eHostImageCopyDevicePerformanceQuery>
  {
    using Type = HostImageCopyDevicePerformanceQuery;
  };

  // Alias kept for source compatibility with the former EXT-suffixed name.
  using HostImageCopyDevicePerformanceQueryEXT = HostImageCopyDevicePerformanceQuery;

  // wrapper struct for struct VkHostImageLayoutTransitionInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkHostImageLayoutTransitionInfo.html
  // The wrapper is member-for-member layout-compatible with the C struct; that is what makes
  // the reinterpret_cast-based conversions and assignments below valid.
  struct HostImageLayoutTransitionInfo
  {
    using NativeType = VkHostImageLayoutTransitionInfo;

    // Compile-time identification of this structure's sType value.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eHostImageLayoutTransitionInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; every argument is defaulted, so this also acts as the default constructor.
    VULKAN_HPP_CONSTEXPR HostImageLayoutTransitionInfo( Image                 image_            = {},
                                                        ImageLayout           oldLayout_        = ImageLayout::eUndefined,
                                                        ImageLayout           newLayout_        = ImageLayout::eUndefined,
                                                        ImageSubresourceRange subresourceRange_ = {},
                                                        const void *          pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , image{ image_ }
      , oldLayout{ oldLayout_ }
      , newLayout{ newLayout_ }
      , subresourceRange{ subresourceRange_ }
    {
    }

    VULKAN_HPP_CONSTEXPR HostImageLayoutTransitionInfo( HostImageLayoutTransitionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native C struct; safe because the two types share one memory layout.
    HostImageLayoutTransitionInfo( VkHostImageLayoutTransitionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : HostImageLayoutTransitionInfo( *reinterpret_cast<HostImageLayoutTransitionInfo const *>( &rhs ) )
    {
    }

    HostImageLayoutTransitionInfo & operator=( HostImageLayoutTransitionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct, again via the shared layout.
    HostImageLayoutTransitionInfo & operator=( VkHostImageLayoutTransitionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<HostImageLayoutTransitionInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this.
    VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setImage( Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setOldLayout( ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      oldLayout = oldLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setNewLayout( ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      newLayout = newLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setSubresourceRange( ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
    {
      subresourceRange = subresourceRange_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkHostImageLayoutTransitionInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkHostImageLayoutTransitionInfo *>( this );
    }

    operator VkHostImageLayoutTransitionInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkHostImageLayoutTransitionInfo *>( this );
    }

    operator VkHostImageLayoutTransitionInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkHostImageLayoutTransitionInfo *>( this );
    }

    operator VkHostImageLayoutTransitionInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkHostImageLayoutTransitionInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; used by operator== below.
    std::tuple<StructureType const &, const void * const &, Image const &, ImageLayout const &, ImageLayout const &, ImageSubresourceRange const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, image, oldLayout, newLayout, subresourceRange );
    }
#endif

    // Comparison: defaulted <=> where available, otherwise member-wise == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( HostImageLayoutTransitionInfo const & ) const = default;
#else
    bool operator==( HostImageLayoutTransitionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( oldLayout == rhs.oldLayout ) && ( newLayout == rhs.newLayout ) &&
             ( subresourceRange == rhs.subresourceRange );
#  endif
    }

    bool operator!=( HostImageLayoutTransitionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkHostImageLayoutTransitionInfo exactly, in declaration order.
    StructureType         sType            = StructureType::eHostImageLayoutTransitionInfo;
    const void *          pNext            = {};
    Image                 image            = {};
    ImageLayout           oldLayout        = ImageLayout::eUndefined;
    ImageLayout           newLayout        = ImageLayout::eUndefined;
    ImageSubresourceRange subresourceRange = {};
  };

  // Maps the native C type back to its C++ wrapper (only available from C++20 on).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkHostImageLayoutTransitionInfo>
  {
    using Type = HostImageLayoutTransitionInfo;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type, for sType-driven lookups.
  template <>
  struct CppType<StructureType, StructureType::eHostImageLayoutTransitionInfo>
  {
    using Type = HostImageLayoutTransitionInfo;
  };

  // Alias kept for code written against the original EXT extension name.
  using HostImageLayoutTransitionInfoEXT = HostImageLayoutTransitionInfo;

// Only compiled when the MoltenVK iOS WSI platform is enabled at build time.
#if defined( VK_USE_PLATFORM_IOS_MVK )
  // wrapper struct for struct VkIOSSurfaceCreateInfoMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIOSSurfaceCreateInfoMVK.html
  // Layout-compatible with the C struct, which makes the reinterpret_cast conversions below valid.
  struct IOSSurfaceCreateInfoMVK
  {
    using NativeType = VkIOSSurfaceCreateInfoMVK;

    // Compile-time identification of this structure's sType value.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eIosSurfaceCreateInfoMVK;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all arguments defaulted, so it doubles as the default constructor.
    VULKAN_HPP_CONSTEXPR
      IOSSurfaceCreateInfoMVK( IOSSurfaceCreateFlagsMVK flags_ = {}, const void * pView_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , pView{ pView_ }
    {
    }

    VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native C struct via the shared memory layout.
    IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
      : IOSSurfaceCreateInfoMVK( *reinterpret_cast<IOSSurfaceCreateInfoMVK const *>( &rhs ) )
    {
    }

    IOSSurfaceCreateInfoMVK & operator=( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    IOSSurfaceCreateInfoMVK & operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<IOSSurfaceCreateInfoMVK const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this.
    VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setFlags( IOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    // pView is an opaque platform pointer — presumably a UIView or CAMetalLayer;
    // confirm against the VK_MVK_ios_surface spec page linked above.
    VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT
    {
      pView = pView_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkIOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( this );
    }

    operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkIOSSurfaceCreateInfoMVK *>( this );
    }

    operator VkIOSSurfaceCreateInfoMVK const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( this );
    }

    operator VkIOSSurfaceCreateInfoMVK *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkIOSSurfaceCreateInfoMVK *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; used by operator== below.
    std::tuple<StructureType const &, const void * const &, IOSSurfaceCreateFlagsMVK const &, const void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, pView );
    }
#  endif

    // Comparison: defaulted <=> where available, otherwise member-wise == / !=.
#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( IOSSurfaceCreateInfoMVK const & ) const = default;
#  else
    bool operator==( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pView == rhs.pView );
#    endif
    }

    bool operator!=( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror VkIOSSurfaceCreateInfoMVK exactly, in declaration order.
    StructureType            sType = StructureType::eIosSurfaceCreateInfoMVK;
    const void *             pNext = {};
    IOSSurfaceCreateFlagsMVK flags = {};
    const void *             pView = {};
  };

  // Maps the native C type back to its C++ wrapper (only available from C++20 on).
#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkIOSSurfaceCreateInfoMVK>
  {
    using Type = IOSSurfaceCreateInfoMVK;
  };
#  endif

  // Maps the StructureType enumerant to the wrapper type, for sType-driven lookups.
  template <>
  struct CppType<StructureType, StructureType::eIosSurfaceCreateInfoMVK>
  {
    using Type = IOSSurfaceCreateInfoMVK;
  };
#endif /*VK_USE_PLATFORM_IOS_MVK*/

  // wrapper struct for struct VkImageAlignmentControlCreateInfoMESA, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageAlignmentControlCreateInfoMESA.html
  // Layout-compatible with the C struct, which makes the reinterpret_cast conversions below valid.
  struct ImageAlignmentControlCreateInfoMESA
  {
    using NativeType = VkImageAlignmentControlCreateInfoMESA;

    // Compile-time identification of this structure's sType value.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageAlignmentControlCreateInfoMESA;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all arguments defaulted, so it doubles as the default constructor.
    VULKAN_HPP_CONSTEXPR ImageAlignmentControlCreateInfoMESA( uint32_t maximumRequestedAlignment_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maximumRequestedAlignment{ maximumRequestedAlignment_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageAlignmentControlCreateInfoMESA( ImageAlignmentControlCreateInfoMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native C struct via the shared memory layout.
    ImageAlignmentControlCreateInfoMESA( VkImageAlignmentControlCreateInfoMESA const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageAlignmentControlCreateInfoMESA( *reinterpret_cast<ImageAlignmentControlCreateInfoMESA const *>( &rhs ) )
    {
    }

    ImageAlignmentControlCreateInfoMESA & operator=( ImageAlignmentControlCreateInfoMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    ImageAlignmentControlCreateInfoMESA & operator=( VkImageAlignmentControlCreateInfoMESA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageAlignmentControlCreateInfoMESA const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this.
    VULKAN_HPP_CONSTEXPR_14 ImageAlignmentControlCreateInfoMESA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    // maximumRequestedAlignment: requested image alignment in bytes — presumably a power of two;
    // confirm against the VK_MESA_image_alignment_control spec linked above.
    VULKAN_HPP_CONSTEXPR_14 ImageAlignmentControlCreateInfoMESA & setMaximumRequestedAlignment( uint32_t maximumRequestedAlignment_ ) VULKAN_HPP_NOEXCEPT
    {
      maximumRequestedAlignment = maximumRequestedAlignment_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkImageAlignmentControlCreateInfoMESA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageAlignmentControlCreateInfoMESA *>( this );
    }

    operator VkImageAlignmentControlCreateInfoMESA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageAlignmentControlCreateInfoMESA *>( this );
    }

    operator VkImageAlignmentControlCreateInfoMESA const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageAlignmentControlCreateInfoMESA *>( this );
    }

    operator VkImageAlignmentControlCreateInfoMESA *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageAlignmentControlCreateInfoMESA *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; used by operator== below.
    std::tuple<StructureType const &, const void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maximumRequestedAlignment );
    }
#endif

    // Comparison: defaulted <=> where available, otherwise member-wise == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageAlignmentControlCreateInfoMESA const & ) const = default;
#else
    bool operator==( ImageAlignmentControlCreateInfoMESA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maximumRequestedAlignment == rhs.maximumRequestedAlignment );
#  endif
    }

    bool operator!=( ImageAlignmentControlCreateInfoMESA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkImageAlignmentControlCreateInfoMESA exactly, in declaration order.
    StructureType sType                     = StructureType::eImageAlignmentControlCreateInfoMESA;
    const void *  pNext                     = {};
    uint32_t      maximumRequestedAlignment = {};
  };

  // Maps the native C type back to its C++ wrapper (only available from C++20 on).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkImageAlignmentControlCreateInfoMESA>
  {
    using Type = ImageAlignmentControlCreateInfoMESA;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type, for sType-driven lookups.
  template <>
  struct CppType<StructureType, StructureType::eImageAlignmentControlCreateInfoMESA>
  {
    using Type = ImageAlignmentControlCreateInfoMESA;
  };

  // wrapper struct for struct VkImageBlit, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageBlit.html
  // Plain data struct (no sType/pNext); layout-compatible with the C struct, which makes the
  // reinterpret_cast conversions below valid.
  struct ImageBlit
  {
    using NativeType = VkImageBlit;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all arguments defaulted, so it doubles as the default constructor.
    // The offset pairs bound the source/destination regions (see the VkImageBlit man page above).
    VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageSubresourceLayers          srcSubresource_ = {},
                                       std::array<Offset3D, 2> const & srcOffsets_     = {},
                                       ImageSubresourceLayers          dstSubresource_ = {},
                                       std::array<Offset3D, 2> const & dstOffsets_     = {} ) VULKAN_HPP_NOEXCEPT
      : srcSubresource{ srcSubresource_ }
      , srcOffsets{ srcOffsets_ }
      , dstSubresource{ dstSubresource_ }
      , dstOffsets{ dstOffsets_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native C struct via the shared memory layout.
    ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT : ImageBlit( *reinterpret_cast<ImageBlit const *>( &rhs ) ) {}

    ImageBlit & operator=( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    ImageBlit & operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageBlit const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this.
    VULKAN_HPP_CONSTEXPR_14 ImageBlit & setSrcSubresource( ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      srcSubresource = srcSubresource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageBlit & setSrcOffsets( std::array<Offset3D, 2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      srcOffsets = srcOffsets_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageBlit & setDstSubresource( ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      dstSubresource = dstSubresource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageBlit & setDstOffsets( std::array<Offset3D, 2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      dstOffsets = dstOffsets_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkImageBlit const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageBlit *>( this );
    }

    operator VkImageBlit &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageBlit *>( this );
    }

    operator VkImageBlit const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageBlit *>( this );
    }

    operator VkImageBlit *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageBlit *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; used by operator== below.
    std::tuple<ImageSubresourceLayers const &, ArrayWrapper1D<Offset3D, 2> const &, ImageSubresourceLayers const &, ArrayWrapper1D<Offset3D, 2> const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( srcSubresource, srcOffsets, dstSubresource, dstOffsets );
    }
#endif

    // Comparison: defaulted <=> where available, otherwise member-wise == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageBlit const & ) const = default;
#else
    bool operator==( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( srcSubresource == rhs.srcSubresource ) && ( srcOffsets == rhs.srcOffsets ) && ( dstSubresource == rhs.dstSubresource ) &&
             ( dstOffsets == rhs.dstOffsets );
#  endif
    }

    bool operator!=( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkImageBlit exactly; ArrayWrapper1D gives std::array-style access
    // over the underlying C array members.
    ImageSubresourceLayers      srcSubresource = {};
    ArrayWrapper1D<Offset3D, 2> srcOffsets     = {};
    ImageSubresourceLayers      dstSubresource = {};
    ArrayWrapper1D<Offset3D, 2> dstOffsets     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkImageBlit>
  {
    using Type = ImageBlit;
  };
#endif

  // wrapper struct for struct VkImageCaptureDescriptorDataInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCaptureDescriptorDataInfoEXT.html
  // Layout-compatible with the C struct, which makes the reinterpret_cast conversions below valid.
  struct ImageCaptureDescriptorDataInfoEXT
  {
    using NativeType = VkImageCaptureDescriptorDataInfoEXT;

    // Compile-time identification of this structure's sType value.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageCaptureDescriptorDataInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all arguments defaulted, so it doubles as the default constructor.
    VULKAN_HPP_CONSTEXPR ImageCaptureDescriptorDataInfoEXT( Image image_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , image{ image_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageCaptureDescriptorDataInfoEXT( ImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native C struct via the shared memory layout.
    ImageCaptureDescriptorDataInfoEXT( VkImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageCaptureDescriptorDataInfoEXT( *reinterpret_cast<ImageCaptureDescriptorDataInfoEXT const *>( &rhs ) )
    {
    }

    ImageCaptureDescriptorDataInfoEXT & operator=( ImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    ImageCaptureDescriptorDataInfoEXT & operator=( VkImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageCaptureDescriptorDataInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this.
    VULKAN_HPP_CONSTEXPR_14 ImageCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCaptureDescriptorDataInfoEXT & setImage( Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkImageCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( this );
    }

    operator VkImageCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageCaptureDescriptorDataInfoEXT *>( this );
    }

    operator VkImageCaptureDescriptorDataInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( this );
    }

    operator VkImageCaptureDescriptorDataInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageCaptureDescriptorDataInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; used by operator== below.
    std::tuple<StructureType const &, const void * const &, Image const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, image );
    }
#endif

    // Comparison: defaulted <=> where available, otherwise member-wise == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageCaptureDescriptorDataInfoEXT const & ) const = default;
#else
    bool operator==( ImageCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image );
#  endif
    }

    bool operator!=( ImageCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkImageCaptureDescriptorDataInfoEXT exactly, in declaration order.
    StructureType sType = StructureType::eImageCaptureDescriptorDataInfoEXT;
    const void *  pNext = {};
    Image         image = {};
  };

  // Maps the native C type back to its C++ wrapper (only available from C++20 on).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkImageCaptureDescriptorDataInfoEXT>
  {
    using Type = ImageCaptureDescriptorDataInfoEXT;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type, for sType-driven lookups.
  template <>
  struct CppType<StructureType, StructureType::eImageCaptureDescriptorDataInfoEXT>
  {
    using Type = ImageCaptureDescriptorDataInfoEXT;
  };

  // wrapper struct for struct VkImageCompressionControlEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCompressionControlEXT.html
  // Layout-compatible with the C struct, which makes the reinterpret_cast conversions below valid.
  struct ImageCompressionControlEXT
  {
    using NativeType = VkImageCompressionControlEXT;

    // Compile-time identification of this structure's sType value.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageCompressionControlEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all arguments defaulted, so it doubles as the default constructor.
    VULKAN_HPP_CONSTEXPR ImageCompressionControlEXT( ImageCompressionFlagsEXT            flags_                        = {},
                                                     uint32_t                            compressionControlPlaneCount_ = {},
                                                     ImageCompressionFixedRateFlagsEXT * pFixedRateFlags_              = {},
                                                     const void *                        pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , compressionControlPlaneCount{ compressionControlPlaneCount_ }
      , pFixedRateFlags{ pFixedRateFlags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageCompressionControlEXT( ImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native C struct via the shared memory layout.
    ImageCompressionControlEXT( VkImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageCompressionControlEXT( *reinterpret_cast<ImageCompressionControlEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives compressionControlPlaneCount / pFixedRateFlags from
    // the array proxy. Note: only the pointer is stored — the caller must keep the array alive
    // for as long as this struct is in use.
    ImageCompressionControlEXT( ImageCompressionFlagsEXT                                           flags_,
                                ArrayProxyNoTemporaries<ImageCompressionFixedRateFlagsEXT> const & fixedRateFlags_,
                                const void *                                                       pNext_ = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , compressionControlPlaneCount( static_cast<uint32_t>( fixedRateFlags_.size() ) )
      , pFixedRateFlags( fixedRateFlags_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    ImageCompressionControlEXT & operator=( ImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    ImageCompressionControlEXT & operator=( VkImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageCompressionControlEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this.
    VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setFlags( ImageCompressionFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setCompressionControlPlaneCount( uint32_t compressionControlPlaneCount_ ) VULKAN_HPP_NOEXCEPT
    {
      compressionControlPlaneCount = compressionControlPlaneCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setPFixedRateFlags( ImageCompressionFixedRateFlagsEXT * pFixedRateFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      pFixedRateFlags = pFixedRateFlags_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets count and pointer together from an array proxy (non-owning, see above).
    ImageCompressionControlEXT & setFixedRateFlags( ArrayProxyNoTemporaries<ImageCompressionFixedRateFlagsEXT> const & fixedRateFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      compressionControlPlaneCount = static_cast<uint32_t>( fixedRateFlags_.size() );
      pFixedRateFlags              = fixedRateFlags_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkImageCompressionControlEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageCompressionControlEXT *>( this );
    }

    operator VkImageCompressionControlEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageCompressionControlEXT *>( this );
    }

    operator VkImageCompressionControlEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageCompressionControlEXT *>( this );
    }

    operator VkImageCompressionControlEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageCompressionControlEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; used by operator== below.
    std::tuple<StructureType const &, const void * const &, ImageCompressionFlagsEXT const &, uint32_t const &, ImageCompressionFixedRateFlagsEXT * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, compressionControlPlaneCount, pFixedRateFlags );
    }
#endif

    // Comparison: defaulted <=> where available, otherwise member-wise == / !=.
    // Note: pFixedRateFlags is compared as a pointer, not by pointed-to contents.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageCompressionControlEXT const & ) const = default;
#else
    bool operator==( ImageCompressionControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( compressionControlPlaneCount == rhs.compressionControlPlaneCount ) && ( pFixedRateFlags == rhs.pFixedRateFlags );
#  endif
    }

    bool operator!=( ImageCompressionControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkImageCompressionControlEXT exactly, in declaration order.
    StructureType                       sType                        = StructureType::eImageCompressionControlEXT;
    const void *                        pNext                        = {};
    ImageCompressionFlagsEXT            flags                        = {};
    uint32_t                            compressionControlPlaneCount = {};
    ImageCompressionFixedRateFlagsEXT * pFixedRateFlags              = {};
  };

  // Maps the native C type back to its C++ wrapper (only available from C++20 on).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkImageCompressionControlEXT>
  {
    using Type = ImageCompressionControlEXT;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type, for sType-driven lookups.
  template <>
  struct CppType<StructureType, StructureType::eImageCompressionControlEXT>
  {
    using Type = ImageCompressionControlEXT;
  };

  // wrapper struct for struct VkImageCompressionPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCompressionPropertiesEXT.html
  // Layout-compatible with the C struct, which makes the reinterpret_cast conversions below valid.
  // No setters are generated for this struct, and pNext is a non-const void* — consistent with an
  // output structure that the implementation fills in (confirm against the spec page above).
  struct ImageCompressionPropertiesEXT
  {
    using NativeType = VkImageCompressionPropertiesEXT;

    // Compile-time identification of this structure's sType value.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageCompressionPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all arguments defaulted, so it doubles as the default constructor.
    VULKAN_HPP_CONSTEXPR ImageCompressionPropertiesEXT( ImageCompressionFlagsEXT          imageCompressionFlags_          = {},
                                                        ImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags_ = {},
                                                        void *                            pNext_                          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , imageCompressionFlags{ imageCompressionFlags_ }
      , imageCompressionFixedRateFlags{ imageCompressionFixedRateFlags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageCompressionPropertiesEXT( ImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native C struct via the shared memory layout.
    ImageCompressionPropertiesEXT( VkImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageCompressionPropertiesEXT( *reinterpret_cast<ImageCompressionPropertiesEXT const *>( &rhs ) )
    {
    }

    ImageCompressionPropertiesEXT & operator=( ImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    ImageCompressionPropertiesEXT & operator=( VkImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageCompressionPropertiesEXT const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkImageCompressionPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageCompressionPropertiesEXT *>( this );
    }

    operator VkImageCompressionPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageCompressionPropertiesEXT *>( this );
    }

    operator VkImageCompressionPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageCompressionPropertiesEXT *>( this );
    }

    operator VkImageCompressionPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageCompressionPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; used by operator== below.
    std::tuple<StructureType const &, void * const &, ImageCompressionFlagsEXT const &, ImageCompressionFixedRateFlagsEXT const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, imageCompressionFlags, imageCompressionFixedRateFlags );
    }
#endif

    // Comparison: defaulted <=> where available, otherwise member-wise == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageCompressionPropertiesEXT const & ) const = default;
#else
    bool operator==( ImageCompressionPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCompressionFlags == rhs.imageCompressionFlags ) &&
             ( imageCompressionFixedRateFlags == rhs.imageCompressionFixedRateFlags );
#  endif
    }

    bool operator!=( ImageCompressionPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkImageCompressionPropertiesEXT exactly, in declaration order.
    StructureType                     sType                          = StructureType::eImageCompressionPropertiesEXT;
    void *                            pNext                          = {};
    ImageCompressionFlagsEXT          imageCompressionFlags          = {};
    ImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type for generic (template) code; only provided for C++20 and later.
  template <>
  struct CppType<VkImageCompressionPropertiesEXT>
  {
    using Type = ImageCompressionPropertiesEXT;
  };
#endif

  // Maps the StructureType enum value to the wrapper struct whose sType it identifies.
  template <>
  struct CppType<StructureType, StructureType::eImageCompressionPropertiesEXT>
  {
    using Type = ImageCompressionPropertiesEXT;
  };

#if defined( VK_USE_PLATFORM_FUCHSIA )
  // wrapper struct for struct VkImageFormatConstraintsInfoFUCHSIA, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageFormatConstraintsInfoFUCHSIA.html
  struct ImageFormatConstraintsInfoFUCHSIA
  {
    using NativeType = VkImageFormatConstraintsInfoFUCHSIA;

    // At most one instance of this struct may appear in a given pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageFormatConstraintsInfoFUCHSIA;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is set by its default member initializer and is never a parameter.
    VULKAN_HPP_CONSTEXPR ImageFormatConstraintsInfoFUCHSIA( ImageCreateInfo                    imageCreateInfo_        = {},
                                                            FormatFeatureFlags                 requiredFormatFeatures_ = {},
                                                            ImageFormatConstraintsFlagsFUCHSIA flags_                  = {},
                                                            uint64_t                           sysmemPixelFormat_      = {},
                                                            uint32_t                           colorSpaceCount_        = {},
                                                            const SysmemColorSpaceFUCHSIA *    pColorSpaces_           = {},
                                                            const void *                       pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , imageCreateInfo{ imageCreateInfo_ }
      , requiredFormatFeatures{ requiredFormatFeatures_ }
      , flags{ flags_ }
      , sysmemPixelFormat{ sysmemPixelFormat_ }
      , colorSpaceCount{ colorSpaceCount_ }
      , pColorSpaces{ pColorSpaces_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageFormatConstraintsInfoFUCHSIA( ImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on the wrapper being layout-compatible with VkImageFormatConstraintsInfoFUCHSIA.
    ImageFormatConstraintsInfoFUCHSIA( VkImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageFormatConstraintsInfoFUCHSIA( *reinterpret_cast<ImageFormatConstraintsInfoFUCHSIA const *>( &rhs ) )
    {
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: colorSpaceCount and pColorSpaces are derived from the ArrayProxy.
    // The proxy does not own its data; the caller must keep the array alive while this struct is in use.
    ImageFormatConstraintsInfoFUCHSIA( ImageCreateInfo                                                imageCreateInfo_,
                                       FormatFeatureFlags                                             requiredFormatFeatures_,
                                       ImageFormatConstraintsFlagsFUCHSIA                             flags_,
                                       uint64_t                                                       sysmemPixelFormat_,
                                       ArrayProxyNoTemporaries<const SysmemColorSpaceFUCHSIA> const & colorSpaces_,
                                       const void *                                                   pNext_ = nullptr )
      : pNext( pNext_ )
      , imageCreateInfo( imageCreateInfo_ )
      , requiredFormatFeatures( requiredFormatFeatures_ )
      , flags( flags_ )
      , sysmemPixelFormat( sysmemPixelFormat_ )
      , colorSpaceCount( static_cast<uint32_t>( colorSpaces_.size() ) )
      , pColorSpaces( colorSpaces_.data() )
    {
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    ImageFormatConstraintsInfoFUCHSIA & operator=( ImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct via the layout-compatible wrapper.
    ImageFormatConstraintsInfoFUCHSIA & operator=( VkImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageFormatConstraintsInfoFUCHSIA const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable fluent setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setImageCreateInfo( ImageCreateInfo const & imageCreateInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      imageCreateInfo = imageCreateInfo_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setRequiredFormatFeatures( FormatFeatureFlags requiredFormatFeatures_ ) VULKAN_HPP_NOEXCEPT
    {
      requiredFormatFeatures = requiredFormatFeatures_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setFlags( ImageFormatConstraintsFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setSysmemPixelFormat( uint64_t sysmemPixelFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      sysmemPixelFormat = sysmemPixelFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setColorSpaceCount( uint32_t colorSpaceCount_ ) VULKAN_HPP_NOEXCEPT
    {
      colorSpaceCount = colorSpaceCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setPColorSpaces( const SysmemColorSpaceFUCHSIA * pColorSpaces_ ) VULKAN_HPP_NOEXCEPT
    {
      pColorSpaces = pColorSpaces_;
      return *this;
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets colorSpaceCount and pColorSpaces together from an ArrayProxy, keeping the two members consistent.
    ImageFormatConstraintsInfoFUCHSIA & setColorSpaces( ArrayProxyNoTemporaries<const SysmemColorSpaceFUCHSIA> const & colorSpaces_ ) VULKAN_HPP_NOEXCEPT
    {
      colorSpaceCount = static_cast<uint32_t>( colorSpaces_.size() );
      pColorSpaces    = colorSpaces_.data();
      return *this;
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#  endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const),
    // allowing this wrapper to be passed directly to the C API.
    operator VkImageFormatConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageFormatConstraintsInfoFUCHSIA *>( this );
    }

    operator VkImageFormatConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageFormatConstraintsInfoFUCHSIA *>( this );
    }

    operator VkImageFormatConstraintsInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageFormatConstraintsInfoFUCHSIA *>( this );
    }

    operator VkImageFormatConstraintsInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageFormatConstraintsInfoFUCHSIA *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order; used for comparison below.
    std::tuple<StructureType const &,
               const void * const &,
               ImageCreateInfo const &,
               FormatFeatureFlags const &,
               ImageFormatConstraintsFlagsFUCHSIA const &,
               uint64_t const &,
               uint32_t const &,
               const SysmemColorSpaceFUCHSIA * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, imageCreateInfo, requiredFormatFeatures, flags, sysmemPixelFormat, colorSpaceCount, pColorSpaces );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageFormatConstraintsInfoFUCHSIA const & ) const = default;
#  else
    // Member-wise equality; note that pNext and pColorSpaces are compared as raw pointers, not deep-compared.
    bool operator==( ImageFormatConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCreateInfo == rhs.imageCreateInfo ) &&
             ( requiredFormatFeatures == rhs.requiredFormatFeatures ) && ( flags == rhs.flags ) && ( sysmemPixelFormat == rhs.sysmemPixelFormat ) &&
             ( colorSpaceCount == rhs.colorSpaceCount ) && ( pColorSpaces == rhs.pColorSpaces );
#    endif
    }

    bool operator!=( ImageFormatConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    StructureType                      sType                  = StructureType::eImageFormatConstraintsInfoFUCHSIA;
    const void *                       pNext                  = {};
    ImageCreateInfo                    imageCreateInfo        = {};
    FormatFeatureFlags                 requiredFormatFeatures = {};
    ImageFormatConstraintsFlagsFUCHSIA flags                  = {};
    uint64_t                           sysmemPixelFormat      = {};
    uint32_t                           colorSpaceCount        = {};
    const SysmemColorSpaceFUCHSIA *    pColorSpaces           = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type for generic (template) code; only provided for C++20 and later.
  template <>
  struct CppType<VkImageFormatConstraintsInfoFUCHSIA>
  {
    using Type = ImageFormatConstraintsInfoFUCHSIA;
  };
#  endif

  // Maps the StructureType enum value to the wrapper struct whose sType it identifies.
  template <>
  struct CppType<StructureType, StructureType::eImageFormatConstraintsInfoFUCHSIA>
  {
    using Type = ImageFormatConstraintsInfoFUCHSIA;
  };
#endif /*VK_USE_PLATFORM_FUCHSIA*/

#if defined( VK_USE_PLATFORM_FUCHSIA )
  // wrapper struct for struct VkImageConstraintsInfoFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageConstraintsInfoFUCHSIA.html
  struct ImageConstraintsInfoFUCHSIA
  {
    using NativeType = VkImageConstraintsInfoFUCHSIA;

    // At most one instance of this struct may appear in a given pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageConstraintsInfoFUCHSIA;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is set by its default member initializer and is never a parameter.
    VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFUCHSIA( uint32_t                                  formatConstraintsCount_      = {},
                                                      const ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints_          = {},
                                                      BufferCollectionConstraintsInfoFUCHSIA    bufferCollectionConstraints_ = {},
                                                      ImageConstraintsInfoFlagsFUCHSIA          flags_                       = {},
                                                      const void *                              pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , formatConstraintsCount{ formatConstraintsCount_ }
      , pFormatConstraints{ pFormatConstraints_ }
      , bufferCollectionConstraints{ bufferCollectionConstraints_ }
      , flags{ flags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFUCHSIA( ImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on the wrapper being layout-compatible with VkImageConstraintsInfoFUCHSIA.
    ImageConstraintsInfoFUCHSIA( VkImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageConstraintsInfoFUCHSIA( *reinterpret_cast<ImageConstraintsInfoFUCHSIA const *>( &rhs ) )
    {
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: formatConstraintsCount and pFormatConstraints are derived from the ArrayProxy.
    // The proxy does not own its data; the caller must keep the array alive while this struct is in use.
    ImageConstraintsInfoFUCHSIA( ArrayProxyNoTemporaries<const ImageFormatConstraintsInfoFUCHSIA> const & formatConstraints_,
                                 BufferCollectionConstraintsInfoFUCHSIA                                   bufferCollectionConstraints_ = {},
                                 ImageConstraintsInfoFlagsFUCHSIA                                         flags_                       = {},
                                 const void *                                                             pNext_                       = nullptr )
      : pNext( pNext_ )
      , formatConstraintsCount( static_cast<uint32_t>( formatConstraints_.size() ) )
      , pFormatConstraints( formatConstraints_.data() )
      , bufferCollectionConstraints( bufferCollectionConstraints_ )
      , flags( flags_ )
    {
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    ImageConstraintsInfoFUCHSIA & operator=( ImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct via the layout-compatible wrapper.
    ImageConstraintsInfoFUCHSIA & operator=( VkImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageConstraintsInfoFUCHSIA const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable fluent setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setFormatConstraintsCount( uint32_t formatConstraintsCount_ ) VULKAN_HPP_NOEXCEPT
    {
      formatConstraintsCount = formatConstraintsCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA &
      setPFormatConstraints( const ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints_ ) VULKAN_HPP_NOEXCEPT
    {
      pFormatConstraints = pFormatConstraints_;
      return *this;
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets formatConstraintsCount and pFormatConstraints together from an ArrayProxy, keeping the two members consistent.
    ImageConstraintsInfoFUCHSIA &
      setFormatConstraints( ArrayProxyNoTemporaries<const ImageFormatConstraintsInfoFUCHSIA> const & formatConstraints_ ) VULKAN_HPP_NOEXCEPT
    {
      formatConstraintsCount = static_cast<uint32_t>( formatConstraints_.size() );
      pFormatConstraints     = formatConstraints_.data();
      return *this;
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA &
      setBufferCollectionConstraints( BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraints_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferCollectionConstraints = bufferCollectionConstraints_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setFlags( ImageConstraintsInfoFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const),
    // allowing this wrapper to be passed directly to the C API.
    operator VkImageConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( this );
    }

    operator VkImageConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageConstraintsInfoFUCHSIA *>( this );
    }

    operator VkImageConstraintsInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( this );
    }

    operator VkImageConstraintsInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageConstraintsInfoFUCHSIA *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order; used for comparison below.
    std::tuple<StructureType const &,
               const void * const &,
               uint32_t const &,
               const ImageFormatConstraintsInfoFUCHSIA * const &,
               BufferCollectionConstraintsInfoFUCHSIA const &,
               ImageConstraintsInfoFlagsFUCHSIA const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, formatConstraintsCount, pFormatConstraints, bufferCollectionConstraints, flags );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageConstraintsInfoFUCHSIA const & ) const = default;
#  else
    // Member-wise equality; note that pNext and pFormatConstraints are compared as raw pointers, not deep-compared.
    bool operator==( ImageConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatConstraintsCount == rhs.formatConstraintsCount ) &&
             ( pFormatConstraints == rhs.pFormatConstraints ) && ( bufferCollectionConstraints == rhs.bufferCollectionConstraints ) && ( flags == rhs.flags );
#    endif
    }

    bool operator!=( ImageConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    StructureType                             sType                       = StructureType::eImageConstraintsInfoFUCHSIA;
    const void *                              pNext                       = {};
    uint32_t                                  formatConstraintsCount      = {};
    const ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints          = {};
    BufferCollectionConstraintsInfoFUCHSIA    bufferCollectionConstraints = {};
    ImageConstraintsInfoFlagsFUCHSIA          flags                       = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type for generic (template) code; only provided for C++20 and later.
  template <>
  struct CppType<VkImageConstraintsInfoFUCHSIA>
  {
    using Type = ImageConstraintsInfoFUCHSIA;
  };
#  endif

  // Maps the StructureType enum value to the wrapper struct whose sType it identifies.
  template <>
  struct CppType<StructureType, StructureType::eImageConstraintsInfoFUCHSIA>
  {
    using Type = ImageConstraintsInfoFUCHSIA;
  };
#endif /*VK_USE_PLATFORM_FUCHSIA*/

  // wrapper struct for struct VkImageCopy, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCopy.html
  struct ImageCopy
  {
    using NativeType = VkImageCopy;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; this struct has no sType/pNext, so all members are parameters.
    VULKAN_HPP_CONSTEXPR ImageCopy( ImageSubresourceLayers srcSubresource_ = {},
                                    Offset3D               srcOffset_      = {},
                                    ImageSubresourceLayers dstSubresource_ = {},
                                    Offset3D               dstOffset_      = {},
                                    Extent3D               extent_         = {} ) VULKAN_HPP_NOEXCEPT
      : srcSubresource{ srcSubresource_ }
      , srcOffset{ srcOffset_ }
      , dstSubresource{ dstSubresource_ }
      , dstOffset{ dstOffset_ }
      , extent{ extent_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageCopy( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on the wrapper being layout-compatible with VkImageCopy.
    ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCopy( *reinterpret_cast<ImageCopy const *>( &rhs ) ) {}

    ImageCopy & operator=( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct via the layout-compatible wrapper.
    ImageCopy & operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageCopy const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable fluent setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 ImageCopy & setSrcSubresource( ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      srcSubresource = srcSubresource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCopy & setSrcOffset( Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      srcOffset = srcOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCopy & setDstSubresource( ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      dstSubresource = dstSubresource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCopy & setDstOffset( Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      dstOffset = dstOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageCopy & setExtent( Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
    {
      extent = extent_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const),
    // allowing this wrapper to be passed directly to the C API.
    operator VkImageCopy const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageCopy *>( this );
    }

    operator VkImageCopy &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageCopy *>( this );
    }

    operator VkImageCopy const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageCopy *>( this );
    }

    operator VkImageCopy *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageCopy *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order; used for comparison below.
    std::tuple<ImageSubresourceLayers const &, Offset3D const &, ImageSubresourceLayers const &, Offset3D const &, Extent3D const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( srcSubresource, srcOffset, dstSubresource, dstOffset, extent );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageCopy const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) &&
             ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent );
#  endif
    }

    bool operator!=( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    ImageSubresourceLayers srcSubresource = {};
    Offset3D               srcOffset      = {};
    ImageSubresourceLayers dstSubresource = {};
    Offset3D               dstOffset      = {};
    Extent3D               extent         = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type for generic (template) code; only provided for C++20 and later.
  template <>
  struct CppType<VkImageCopy>
  {
    using Type = ImageCopy;
  };
#endif

  // wrapper struct for struct VkSubresourceLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubresourceLayout.html
  struct SubresourceLayout
  {
    using NativeType = VkSubresourceLayout;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; this struct has no sType/pNext, so all members are parameters.
    VULKAN_HPP_CONSTEXPR SubresourceLayout(
      DeviceSize offset_ = {}, DeviceSize size_ = {}, DeviceSize rowPitch_ = {}, DeviceSize arrayPitch_ = {}, DeviceSize depthPitch_ = {} ) VULKAN_HPP_NOEXCEPT
      : offset{ offset_ }
      , size{ size_ }
      , rowPitch{ rowPitch_ }
      , arrayPitch{ arrayPitch_ }
      , depthPitch{ depthPitch_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SubresourceLayout( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on the wrapper being layout-compatible with VkSubresourceLayout.
    SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT : SubresourceLayout( *reinterpret_cast<SubresourceLayout const *>( &rhs ) ) {}

    SubresourceLayout & operator=( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct via the layout-compatible wrapper.
    SubresourceLayout & operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SubresourceLayout const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable fluent setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setOffset( DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setSize( DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setRowPitch( DeviceSize rowPitch_ ) VULKAN_HPP_NOEXCEPT
    {
      rowPitch = rowPitch_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setArrayPitch( DeviceSize arrayPitch_ ) VULKAN_HPP_NOEXCEPT
    {
      arrayPitch = arrayPitch_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setDepthPitch( DeviceSize depthPitch_ ) VULKAN_HPP_NOEXCEPT
    {
      depthPitch = depthPitch_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const),
    // allowing this wrapper to be passed directly to the C API.
    operator VkSubresourceLayout const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSubresourceLayout *>( this );
    }

    operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubresourceLayout *>( this );
    }

    operator VkSubresourceLayout const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSubresourceLayout *>( this );
    }

    operator VkSubresourceLayout *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSubresourceLayout *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order; used for comparison below.
    std::tuple<DeviceSize const &, DeviceSize const &, DeviceSize const &, DeviceSize const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( offset, size, rowPitch, arrayPitch, depthPitch );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubresourceLayout const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( offset == rhs.offset ) && ( size == rhs.size ) && ( rowPitch == rhs.rowPitch ) && ( arrayPitch == rhs.arrayPitch ) &&
             ( depthPitch == rhs.depthPitch );
#  endif
    }

    bool operator!=( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    DeviceSize offset     = {};
    DeviceSize size       = {};
    DeviceSize rowPitch   = {};
    DeviceSize arrayPitch = {};
    DeviceSize depthPitch = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type for generic (template) code; only provided for C++20 and later.
  template <>
  struct CppType<VkSubresourceLayout>
  {
    using Type = SubresourceLayout;
  };
#endif

  // wrapper struct for struct VkImageDrmFormatModifierExplicitCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageDrmFormatModifierExplicitCreateInfoEXT.html
  struct ImageDrmFormatModifierExplicitCreateInfoEXT
  {
    using NativeType = VkImageDrmFormatModifierExplicitCreateInfoEXT;

    // At most one instance of this struct may appear in a given pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is set by its default member initializer and is never a parameter.
    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t                  drmFormatModifier_           = {},
                                                                      uint32_t                  drmFormatModifierPlaneCount_ = {},
                                                                      const SubresourceLayout * pPlaneLayouts_               = {},
                                                                      const void *              pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , drmFormatModifier{ drmFormatModifier_ }
      , drmFormatModifierPlaneCount{ drmFormatModifierPlaneCount_ }
      , pPlaneLayouts{ pPlaneLayouts_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on the wrapper being layout-compatible with VkImageDrmFormatModifierExplicitCreateInfoEXT.
    ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageDrmFormatModifierExplicitCreateInfoEXT( *reinterpret_cast<ImageDrmFormatModifierExplicitCreateInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: drmFormatModifierPlaneCount and pPlaneLayouts are derived from the ArrayProxy.
    // The proxy does not own its data; the caller must keep the array alive while this struct is in use.
    ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t                                                 drmFormatModifier_,
                                                 ArrayProxyNoTemporaries<const SubresourceLayout> const & planeLayouts_,
                                                 const void *                                             pNext_ = nullptr )
      : pNext( pNext_ )
      , drmFormatModifier( drmFormatModifier_ )
      , drmFormatModifierPlaneCount( static_cast<uint32_t>( planeLayouts_.size() ) )
      , pPlaneLayouts( planeLayouts_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct via the layout-compatible wrapper.
    ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageDrmFormatModifierExplicitCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable fluent setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
    {
      drmFormatModifier = drmFormatModifier_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT &
      setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) VULKAN_HPP_NOEXCEPT
    {
      drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setPPlaneLayouts( const SubresourceLayout * pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      pPlaneLayouts = pPlaneLayouts_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets drmFormatModifierPlaneCount and pPlaneLayouts together from an ArrayProxy, keeping the two members consistent.
    ImageDrmFormatModifierExplicitCreateInfoEXT & setPlaneLayouts( ArrayProxyNoTemporaries<const SubresourceLayout> const & planeLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      drmFormatModifierPlaneCount = static_cast<uint32_t>( planeLayouts_.size() );
      pPlaneLayouts               = planeLayouts_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const),
    // allowing this wrapper to be passed directly to the C API.
    operator VkImageDrmFormatModifierExplicitCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageDrmFormatModifierExplicitCreateInfoEXT *>( this );
    }

    operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageDrmFormatModifierExplicitCreateInfoEXT *>( this );
    }

    operator VkImageDrmFormatModifierExplicitCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageDrmFormatModifierExplicitCreateInfoEXT *>( this );
    }

    operator VkImageDrmFormatModifierExplicitCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageDrmFormatModifierExplicitCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order; used for comparison below.
    std::tuple<StructureType const &, const void * const &, uint64_t const &, uint32_t const &, const SubresourceLayout * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, drmFormatModifier, drmFormatModifierPlaneCount, pPlaneLayouts );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageDrmFormatModifierExplicitCreateInfoEXT const & ) const = default;
#else
    // Member-wise equality; note that pNext and pPlaneLayouts are compared as raw pointers, not deep-compared.
    bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) &&
             ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) && ( pPlaneLayouts == rhs.pPlaneLayouts );
#  endif
    }

    bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType             sType                       = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
    const void *              pNext                       = {};
    uint64_t                  drmFormatModifier           = {};
    uint32_t                  drmFormatModifierPlaneCount = {};
    const SubresourceLayout * pPlaneLayouts               = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type for generic (template) code; only provided for C++20 and later.
  template <>
  struct CppType<VkImageDrmFormatModifierExplicitCreateInfoEXT>
  {
    using Type = ImageDrmFormatModifierExplicitCreateInfoEXT;
  };
#endif

  // Maps the StructureType enum value to the wrapper struct whose sType it identifies.
  template <>
  struct CppType<StructureType, StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT>
  {
    using Type = ImageDrmFormatModifierExplicitCreateInfoEXT;
  };

  // wrapper struct for struct VkImageDrmFormatModifierListCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageDrmFormatModifierListCreateInfoEXT.html
  struct ImageDrmFormatModifierListCreateInfoEXT
  {
    using NativeType = VkImageDrmFormatModifierListCreateInfoEXT;

    // Chain metadata: the sType enumerant for this struct; allowDuplicate == false,
    // i.e. this struct type is not expected to appear more than once in the same pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageDrmFormatModifierListCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( uint32_t         drmFormatModifierCount_ = {},
                                                                  const uint64_t * pDrmFormatModifiers_    = {},
                                                                  const void *     pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , drmFormatModifierCount{ drmFormatModifierCount_ }
      , pDrmFormatModifiers{ pDrmFormatModifiers_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with the native type.
    ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageDrmFormatModifierListCreateInfoEXT( *reinterpret_cast<ImageDrmFormatModifierListCreateInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives drmFormatModifierCount and pDrmFormatModifiers from the proxy range.
    // The proxy is non-owning; the referenced data must outlive any use of this struct.
    ImageDrmFormatModifierListCreateInfoEXT( ArrayProxyNoTemporaries<const uint64_t> const & drmFormatModifiers_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifiers_.size() ) ), pDrmFormatModifiers( drmFormatModifiers_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    ImageDrmFormatModifierListCreateInfoEXT & operator=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpret_cast).
    ImageDrmFormatModifierListCreateInfoEXT & operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageDrmFormatModifierListCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT
    {
      drmFormatModifierCount = drmFormatModifierCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setPDrmFormatModifiers( const uint64_t * pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
    {
      pDrmFormatModifiers = pDrmFormatModifiers_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Range setter: updates count and pointer together from a non-owning proxy.
    ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifiers( ArrayProxyNoTemporaries<const uint64_t> const & drmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
    {
      drmFormatModifierCount = static_cast<uint32_t>( drmFormatModifiers_.size() );
      pDrmFormatModifiers    = drmFormatModifiers_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type; valid because the wrapper is layout-identical.
    operator VkImageDrmFormatModifierListCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageDrmFormatModifierListCreateInfoEXT *>( this );
    }

    operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageDrmFormatModifierListCreateInfoEXT *>( this );
    }

    operator VkImageDrmFormatModifierListCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageDrmFormatModifierListCreateInfoEXT *>( this );
    }

    operator VkImageDrmFormatModifierListCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageDrmFormatModifierListCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members; operator== below compares via this when reflection is enabled.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const uint64_t * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifiers );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageDrmFormatModifierListCreateInfoEXT const & ) const = default;
#else
    // Memberwise equality, used as fallback when the defaulted three-way comparison is unavailable.
    // Note: pointer members are compared by address, not by pointee contents.
    bool operator==( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) &&
             ( pDrmFormatModifiers == rhs.pDrmFormatModifiers );
#  endif
    }

    bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType    sType                  = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
    const void *     pNext                  = {};
    uint32_t         drmFormatModifierCount = {};
    const uint64_t * pDrmFormatModifiers    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Map the native C struct to its C++ wrapper type (specialized for C++20 and newer only).
  template <>
  struct CppType<VkImageDrmFormatModifierListCreateInfoEXT>
  {
    using Type = ImageDrmFormatModifierListCreateInfoEXT;
  };
#endif

  // Map the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eImageDrmFormatModifierListCreateInfoEXT>
  {
    using Type = ImageDrmFormatModifierListCreateInfoEXT;
  };

  // wrapper struct for struct VkImageDrmFormatModifierPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageDrmFormatModifierPropertiesEXT.html
  // Output-style struct: pNext is non-const and no setters are generated.
  struct ImageDrmFormatModifierPropertiesEXT
  {
    using NativeType = VkImageDrmFormatModifierPropertiesEXT;

    // Chain metadata: sType enumerant; allowDuplicate == false, i.e. at most one
    // occurrence of this struct type is expected in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageDrmFormatModifierPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , drmFormatModifier{ drmFormatModifier_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with the native type.
    ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageDrmFormatModifierPropertiesEXT( *reinterpret_cast<ImageDrmFormatModifierPropertiesEXT const *>( &rhs ) )
    {
    }

    ImageDrmFormatModifierPropertiesEXT & operator=( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpret_cast).
    ImageDrmFormatModifierPropertiesEXT & operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageDrmFormatModifierPropertiesEXT const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type; valid because the wrapper is layout-identical.
    operator VkImageDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageDrmFormatModifierPropertiesEXT *>( this );
    }

    operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( this );
    }

    operator VkImageDrmFormatModifierPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageDrmFormatModifierPropertiesEXT *>( this );
    }

    operator VkImageDrmFormatModifierPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members; operator== below compares via this when reflection is enabled.
    std::tuple<StructureType const &, void * const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, drmFormatModifier );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageDrmFormatModifierPropertiesEXT const & ) const = default;
#else
    // Memberwise equality, used as fallback when the defaulted three-way comparison is unavailable.
    bool operator==( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier );
#  endif
    }

    bool operator!=( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType             = StructureType::eImageDrmFormatModifierPropertiesEXT;
    void *        pNext             = {};
    uint64_t      drmFormatModifier = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Map the native C struct to its C++ wrapper type (specialized for C++20 and newer only).
  template <>
  struct CppType<VkImageDrmFormatModifierPropertiesEXT>
  {
    using Type = ImageDrmFormatModifierPropertiesEXT;
  };
#endif

  // Map the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eImageDrmFormatModifierPropertiesEXT>
  {
    using Type = ImageDrmFormatModifierPropertiesEXT;
  };

  // wrapper struct for struct VkImageFormatListCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageFormatListCreateInfo.html
  struct ImageFormatListCreateInfo
  {
    using NativeType = VkImageFormatListCreateInfo;

    // Chain metadata: sType enumerant; allowDuplicate == false, i.e. at most one
    // occurrence of this struct type is expected in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageFormatListCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      ImageFormatListCreateInfo( uint32_t viewFormatCount_ = {}, const Format * pViewFormats_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , viewFormatCount{ viewFormatCount_ }
      , pViewFormats{ pViewFormats_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with the native type.
    ImageFormatListCreateInfo( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageFormatListCreateInfo( *reinterpret_cast<ImageFormatListCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives viewFormatCount and pViewFormats from the proxy range.
    // The proxy is non-owning; the referenced data must outlive any use of this struct.
    ImageFormatListCreateInfo( ArrayProxyNoTemporaries<const Format> const & viewFormats_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), viewFormatCount( static_cast<uint32_t>( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    ImageFormatListCreateInfo & operator=( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpret_cast).
    ImageFormatListCreateInfo & operator=( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageFormatListCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
    {
      viewFormatCount = viewFormatCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setPViewFormats( const Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT
    {
      pViewFormats = pViewFormats_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Range setter: updates count and pointer together from a non-owning proxy.
    ImageFormatListCreateInfo & setViewFormats( ArrayProxyNoTemporaries<const Format> const & viewFormats_ ) VULKAN_HPP_NOEXCEPT
    {
      viewFormatCount = static_cast<uint32_t>( viewFormats_.size() );
      pViewFormats    = viewFormats_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type; valid because the wrapper is layout-identical.
    operator VkImageFormatListCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageFormatListCreateInfo *>( this );
    }

    operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageFormatListCreateInfo *>( this );
    }

    operator VkImageFormatListCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageFormatListCreateInfo *>( this );
    }

    operator VkImageFormatListCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageFormatListCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members; operator== below compares via this when reflection is enabled.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const Format * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, viewFormatCount, pViewFormats );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageFormatListCreateInfo const & ) const = default;
#else
    // Memberwise equality, used as fallback when the defaulted three-way comparison is unavailable.
    // Note: pointer members are compared by address, not by pointee contents.
    bool operator==( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewFormatCount == rhs.viewFormatCount ) && ( pViewFormats == rhs.pViewFormats );
#  endif
    }

    bool operator!=( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType  sType           = StructureType::eImageFormatListCreateInfo;
    const void *   pNext           = {};
    uint32_t       viewFormatCount = {};
    const Format * pViewFormats    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Map the native C struct to its C++ wrapper type (specialized for C++20 and newer only).
  template <>
  struct CppType<VkImageFormatListCreateInfo>
  {
    using Type = ImageFormatListCreateInfo;
  };
#endif

  // Map the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eImageFormatListCreateInfo>
  {
    using Type = ImageFormatListCreateInfo;
  };

  // Alias under the extension (KHR) name — presumably kept for source compatibility; confirm against the registry.
  using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo;

  // wrapper struct for struct VkImageFormatProperties2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageFormatProperties2.html
  // Output-style struct: pNext is non-const and no setters are generated.
  struct ImageFormatProperties2
  {
    using NativeType = VkImageFormatProperties2;

    // Chain metadata: sType enumerant; allowDuplicate == false, i.e. at most one
    // occurrence of this struct type is expected in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageFormatProperties2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImageFormatProperties2( ImageFormatProperties imageFormatProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , imageFormatProperties{ imageFormatProperties_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageFormatProperties2( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with the native type.
    ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageFormatProperties2( *reinterpret_cast<ImageFormatProperties2 const *>( &rhs ) )
    {
    }

    ImageFormatProperties2 & operator=( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpret_cast).
    ImageFormatProperties2 & operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageFormatProperties2 const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type; valid because the wrapper is layout-identical.
    operator VkImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageFormatProperties2 *>( this );
    }

    operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageFormatProperties2 *>( this );
    }

    operator VkImageFormatProperties2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageFormatProperties2 *>( this );
    }

    operator VkImageFormatProperties2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageFormatProperties2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members; operator== below compares via this when reflection is enabled.
    std::tuple<StructureType const &, void * const &, ImageFormatProperties const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, imageFormatProperties );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageFormatProperties2 const & ) const = default;
#else
    // Memberwise equality, used as fallback when the defaulted three-way comparison is unavailable.
    bool operator==( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageFormatProperties == rhs.imageFormatProperties );
#  endif
    }

    bool operator!=( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType         sType                 = StructureType::eImageFormatProperties2;
    void *                pNext                 = {};
    ImageFormatProperties imageFormatProperties = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Map the native C struct to its C++ wrapper type (specialized for C++20 and newer only).
  template <>
  struct CppType<VkImageFormatProperties2>
  {
    using Type = ImageFormatProperties2;
  };
#endif

  // Map the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eImageFormatProperties2>
  {
    using Type = ImageFormatProperties2;
  };

  // Alias under the extension (KHR) name — presumably kept for source compatibility; confirm against the registry.
  using ImageFormatProperties2KHR = ImageFormatProperties2;

  // wrapper struct for struct VkImageMemoryBarrier, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageMemoryBarrier.html
  struct ImageMemoryBarrier
  {
    using NativeType = VkImageMemoryBarrier;

    // Chain metadata: sType enumerant; allowDuplicate == false, i.e. at most one
    // occurrence of this struct type is expected in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageMemoryBarrier;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Layouts default to eUndefined; all other members are value-initialized.
    VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( AccessFlags           srcAccessMask_       = {},
                                             AccessFlags           dstAccessMask_       = {},
                                             ImageLayout           oldLayout_           = ImageLayout::eUndefined,
                                             ImageLayout           newLayout_           = ImageLayout::eUndefined,
                                             uint32_t              srcQueueFamilyIndex_ = {},
                                             uint32_t              dstQueueFamilyIndex_ = {},
                                             Image                 image_               = {},
                                             ImageSubresourceRange subresourceRange_    = {},
                                             const void *          pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcAccessMask{ srcAccessMask_ }
      , dstAccessMask{ dstAccessMask_ }
      , oldLayout{ oldLayout_ }
      , newLayout{ newLayout_ }
      , srcQueueFamilyIndex{ srcQueueFamilyIndex_ }
      , dstQueueFamilyIndex{ dstQueueFamilyIndex_ }
      , image{ image_ }
      , subresourceRange{ subresourceRange_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with the native type.
    ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT : ImageMemoryBarrier( *reinterpret_cast<ImageMemoryBarrier const *>( &rhs ) ) {}

    ImageMemoryBarrier & operator=( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpret_cast).
    ImageMemoryBarrier & operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageMemoryBarrier const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSrcAccessMask( AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAccessMask = srcAccessMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setDstAccessMask( AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAccessMask = dstAccessMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setOldLayout( ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      oldLayout = oldLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setNewLayout( ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      newLayout = newLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      srcQueueFamilyIndex = srcQueueFamilyIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      dstQueueFamilyIndex = dstQueueFamilyIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setImage( Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSubresourceRange( ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
    {
      subresourceRange = subresourceRange_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type; valid because the wrapper is layout-identical.
    operator VkImageMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageMemoryBarrier *>( this );
    }

    operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageMemoryBarrier *>( this );
    }

    operator VkImageMemoryBarrier const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageMemoryBarrier *>( this );
    }

    operator VkImageMemoryBarrier *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageMemoryBarrier *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members; operator== below compares via this when reflection is enabled.
    std::tuple<StructureType const &,
               const void * const &,
               AccessFlags const &,
               AccessFlags const &,
               ImageLayout const &,
               ImageLayout const &,
               uint32_t const &,
               uint32_t const &,
               Image const &,
               ImageSubresourceRange const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcAccessMask, dstAccessMask, oldLayout, newLayout, srcQueueFamilyIndex, dstQueueFamilyIndex, image, subresourceRange );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageMemoryBarrier const & ) const = default;
#else
    // Memberwise equality, used as fallback when the defaulted three-way comparison is unavailable.
    bool operator==( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) &&
             ( oldLayout == rhs.oldLayout ) && ( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) &&
             ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( image == rhs.image ) && ( subresourceRange == rhs.subresourceRange );
#  endif
    }

    bool operator!=( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType         sType               = StructureType::eImageMemoryBarrier;
    const void *          pNext               = {};
    AccessFlags           srcAccessMask       = {};
    AccessFlags           dstAccessMask       = {};
    ImageLayout           oldLayout           = ImageLayout::eUndefined;
    ImageLayout           newLayout           = ImageLayout::eUndefined;
    uint32_t              srcQueueFamilyIndex = {};
    uint32_t              dstQueueFamilyIndex = {};
    Image                 image               = {};
    ImageSubresourceRange subresourceRange    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Map the native C struct to its C++ wrapper type (specialized for C++20 and newer only).
  template <>
  struct CppType<VkImageMemoryBarrier>
  {
    using Type = ImageMemoryBarrier;
  };
#endif

  // Map the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eImageMemoryBarrier>
  {
    using Type = ImageMemoryBarrier;
  };

  // wrapper struct for struct VkImageMemoryRequirementsInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageMemoryRequirementsInfo2.html
  struct ImageMemoryRequirementsInfo2
  {
    using NativeType = VkImageMemoryRequirementsInfo2;

    // Chain metadata: sType enumerant; allowDuplicate == false, i.e. at most one
    // occurrence of this struct type is expected in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageMemoryRequirementsInfo2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( Image image_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , image{ image_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with the native type.
    ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageMemoryRequirementsInfo2( *reinterpret_cast<ImageMemoryRequirementsInfo2 const *>( &rhs ) )
    {
    }

    ImageMemoryRequirementsInfo2 & operator=( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpret_cast).
    ImageMemoryRequirementsInfo2 & operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageMemoryRequirementsInfo2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & setImage( Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type; valid because the wrapper is layout-identical.
    operator VkImageMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( this );
    }

    operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageMemoryRequirementsInfo2 *>( this );
    }

    operator VkImageMemoryRequirementsInfo2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( this );
    }

    operator VkImageMemoryRequirementsInfo2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageMemoryRequirementsInfo2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members; operator== below compares via this when reflection is enabled.
    std::tuple<StructureType const &, const void * const &, Image const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, image );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageMemoryRequirementsInfo2 const & ) const = default;
#else
    // Memberwise equality, used as fallback when the defaulted three-way comparison is unavailable.
    bool operator==( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image );
#  endif
    }

    bool operator!=( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType = StructureType::eImageMemoryRequirementsInfo2;
    const void *  pNext = {};
    Image         image = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Map the native C struct to its C++ wrapper type (specialized for C++20 and newer only).
  template <>
  struct CppType<VkImageMemoryRequirementsInfo2>
  {
    using Type = ImageMemoryRequirementsInfo2;
  };
#endif

  // Map the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eImageMemoryRequirementsInfo2>
  {
    using Type = ImageMemoryRequirementsInfo2;
  };

  // Alias under the extension (KHR) name — presumably kept for source compatibility; confirm against the registry.
  using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2;

#if defined( VK_USE_PLATFORM_FUCHSIA )
  // wrapper struct for struct VkImagePipeSurfaceCreateInfoFUCHSIA, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImagePipeSurfaceCreateInfoFUCHSIA.html
  // Only available when VK_USE_PLATFORM_FUCHSIA is defined (zx_handle_t is a Fuchsia platform type).
  struct ImagePipeSurfaceCreateInfoFUCHSIA
  {
    using NativeType = VkImagePipeSurfaceCreateInfoFUCHSIA;

    // Chain metadata: sType enumerant; allowDuplicate == false, i.e. at most one
    // occurrence of this struct type is expected in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( ImagePipeSurfaceCreateFlagsFUCHSIA flags_           = {},
                                                            zx_handle_t                        imagePipeHandle_ = {},
                                                            const void *                       pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , imagePipeHandle{ imagePipeHandle_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with the native type.
    ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImagePipeSurfaceCreateInfoFUCHSIA( *reinterpret_cast<ImagePipeSurfaceCreateInfoFUCHSIA const *>( &rhs ) )
    {
    }

    ImagePipeSurfaceCreateInfoFUCHSIA & operator=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpret_cast).
    ImagePipeSurfaceCreateInfoFUCHSIA & operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImagePipeSurfaceCreateInfoFUCHSIA const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setFlags( ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setImagePipeHandle( zx_handle_t imagePipeHandle_ ) VULKAN_HPP_NOEXCEPT
    {
      imagePipeHandle = imagePipeHandle_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type; valid because the wrapper is layout-identical.
    operator VkImagePipeSurfaceCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( this );
    }

    operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImagePipeSurfaceCreateInfoFUCHSIA *>( this );
    }

    operator VkImagePipeSurfaceCreateInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( this );
    }

    operator VkImagePipeSurfaceCreateInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImagePipeSurfaceCreateInfoFUCHSIA *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members (used by reflection-based comparison).
    std::tuple<StructureType const &, const void * const &, ImagePipeSurfaceCreateFlagsFUCHSIA const &, zx_handle_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, imagePipeHandle );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written three-way comparison: imagePipeHandle (zx_handle_t) is compared
    // bytewise via memcmp instead of relying on a defaulted operator<=>.
    std::strong_ordering operator<=>( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
        return cmp;
      if ( auto cmp = memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#  endif

    // Equality is defined unconditionally here (unlike the purely generated structs above),
    // again comparing the handle's bytes with memcmp.
    bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ) == 0 );
    }

    bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    StructureType                      sType           = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
    const void *                       pNext           = {};
    ImagePipeSurfaceCreateFlagsFUCHSIA flags           = {};
    zx_handle_t                        imagePipeHandle = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Map the native C struct to its C++ wrapper type (specialized for C++20 and newer only).
  template <>
  struct CppType<VkImagePipeSurfaceCreateInfoFUCHSIA>
  {
    using Type = ImagePipeSurfaceCreateInfoFUCHSIA;
  };
#  endif

  // Map the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eImagepipeSurfaceCreateInfoFUCHSIA>
  {
    using Type = ImagePipeSurfaceCreateInfoFUCHSIA;
  };
#endif /*VK_USE_PLATFORM_FUCHSIA*/

  // wrapper struct for struct VkImagePlaneMemoryRequirementsInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImagePlaneMemoryRequirementsInfo.html
  // NOTE: member layout must remain identical to the C struct; the conversion operators below rely on reinterpret_cast.
  struct ImagePlaneMemoryRequirementsInfo
  {
    using NativeType = VkImagePlaneMemoryRequirementsInfo;

    // allowDuplicate == false: this struct may appear at most once in a pNext chain; structureType is the matching VkStructureType.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImagePlaneMemoryRequirementsInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Memberwise constructor; sType is fixed by the member initializer below and never set here.
    VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( ImageAspectFlagBits planeAspect_ = ImageAspectFlagBits::eColor,
                                                           const void *        pNext_       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , planeAspect{ planeAspect_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-compatible with it.
    ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImagePlaneMemoryRequirementsInfo( *reinterpret_cast<ImagePlaneMemoryRequirementsInfo const *>( &rhs ) )
    {
    }

    ImagePlaneMemoryRequirementsInfo & operator=( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpret_cast).
    ImagePlaneMemoryRequirementsInfo & operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImagePlaneMemoryRequirementsInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chained builder-style initialization.
    VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo & setPlaneAspect( ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
    {
      planeAspect = planeAspect_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const).
    operator VkImagePlaneMemoryRequirementsInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImagePlaneMemoryRequirementsInfo *>( this );
    }

    operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImagePlaneMemoryRequirementsInfo *>( this );
    }

    operator VkImagePlaneMemoryRequirementsInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImagePlaneMemoryRequirementsInfo *>( this );
    }

    operator VkImagePlaneMemoryRequirementsInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImagePlaneMemoryRequirementsInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, ImageAspectFlagBits const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, planeAspect );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Memberwise comparison: defaulted three-way when available, otherwise hand-written == / !=.
    auto operator<=>( ImagePlaneMemoryRequirementsInfo const & ) const = default;
#else
    bool operator==( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( planeAspect == rhs.planeAspect );
#  endif
    }

    bool operator!=( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct, in declaration order.
    StructureType       sType       = StructureType::eImagePlaneMemoryRequirementsInfo;
    const void *        pNext       = {};
    ImageAspectFlagBits planeAspect = ImageAspectFlagBits::eColor;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct VkImagePlaneMemoryRequirementsInfo to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkImagePlaneMemoryRequirementsInfo>
  {
    using Type = ImagePlaneMemoryRequirementsInfo;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type, enabling sType-based lookups (e.g. in structure chains).
  template <>
  struct CppType<StructureType, StructureType::eImagePlaneMemoryRequirementsInfo>
  {
    using Type = ImagePlaneMemoryRequirementsInfo;
  };

  // Backward-compatibility alias retaining the extension-suffixed (KHR) name.
  using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo;

  // wrapper struct for struct VkImageResolve, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageResolve.html
  // Plain data struct (no sType/pNext). NOTE: member layout must remain identical to the C struct;
  // the conversion operators below rely on reinterpret_cast.
  struct ImageResolve
  {
    using NativeType = VkImageResolve;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Memberwise constructor; all members default to zero-initialized values.
    VULKAN_HPP_CONSTEXPR ImageResolve( ImageSubresourceLayers srcSubresource_ = {},
                                       Offset3D               srcOffset_      = {},
                                       ImageSubresourceLayers dstSubresource_ = {},
                                       Offset3D               dstOffset_      = {},
                                       Extent3D               extent_         = {} ) VULKAN_HPP_NOEXCEPT
      : srcSubresource{ srcSubresource_ }
      , srcOffset{ srcOffset_ }
      , dstSubresource{ dstSubresource_ }
      , dstOffset{ dstOffset_ }
      , extent{ extent_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageResolve( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-compatible with it.
    ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT : ImageResolve( *reinterpret_cast<ImageResolve const *>( &rhs ) ) {}

    ImageResolve & operator=( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpret_cast).
    ImageResolve & operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageResolve const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chained builder-style initialization.
    VULKAN_HPP_CONSTEXPR_14 ImageResolve & setSrcSubresource( ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      srcSubresource = srcSubresource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageResolve & setSrcOffset( Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      srcOffset = srcOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageResolve & setDstSubresource( ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      dstSubresource = dstSubresource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageResolve & setDstOffset( Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      dstOffset = dstOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageResolve & setExtent( Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
    {
      extent = extent_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const).
    operator VkImageResolve const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageResolve *>( this );
    }

    operator VkImageResolve &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageResolve *>( this );
    }

    operator VkImageResolve const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageResolve *>( this );
    }

    operator VkImageResolve *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageResolve *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<ImageSubresourceLayers const &, Offset3D const &, ImageSubresourceLayers const &, Offset3D const &, Extent3D const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( srcSubresource, srcOffset, dstSubresource, dstOffset, extent );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Memberwise comparison: defaulted three-way when available, otherwise hand-written == / !=.
    auto operator<=>( ImageResolve const & ) const = default;
#else
    bool operator==( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) &&
             ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent );
#  endif
    }

    bool operator!=( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct, in declaration order.
    ImageSubresourceLayers srcSubresource = {};
    Offset3D               srcOffset      = {};
    ImageSubresourceLayers dstSubresource = {};
    Offset3D               dstOffset      = {};
    Extent3D               extent         = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct VkImageResolve to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkImageResolve>
  {
    using Type = ImageResolve;
  };
#endif

  // wrapper struct for struct VkImageResolve2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageResolve2.html
  // Extensible (sType/pNext) variant of ImageResolve. NOTE: member layout must remain identical to the C struct;
  // the conversion operators below rely on reinterpret_cast.
  struct ImageResolve2
  {
    using NativeType = VkImageResolve2;

    // allowDuplicate == false: this struct may appear at most once in a pNext chain; structureType is the matching VkStructureType.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageResolve2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Memberwise constructor; sType is fixed by the member initializer below and never set here.
    VULKAN_HPP_CONSTEXPR ImageResolve2( ImageSubresourceLayers srcSubresource_ = {},
                                        Offset3D               srcOffset_      = {},
                                        ImageSubresourceLayers dstSubresource_ = {},
                                        Offset3D               dstOffset_      = {},
                                        Extent3D               extent_         = {},
                                        const void *           pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcSubresource{ srcSubresource_ }
      , srcOffset{ srcOffset_ }
      , dstSubresource{ dstSubresource_ }
      , dstOffset{ dstOffset_ }
      , extent{ extent_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageResolve2( ImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-compatible with it.
    ImageResolve2( VkImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageResolve2( *reinterpret_cast<ImageResolve2 const *>( &rhs ) ) {}

    ImageResolve2 & operator=( ImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpret_cast).
    ImageResolve2 & operator=( VkImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageResolve2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chained builder-style initialization.
    VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setSrcSubresource( ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      srcSubresource = srcSubresource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setSrcOffset( Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      srcOffset = srcOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setDstSubresource( ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      dstSubresource = dstSubresource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setDstOffset( Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      dstOffset = dstOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setExtent( Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
    {
      extent = extent_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const).
    operator VkImageResolve2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageResolve2 *>( this );
    }

    operator VkImageResolve2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageResolve2 *>( this );
    }

    operator VkImageResolve2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageResolve2 *>( this );
    }

    operator VkImageResolve2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageResolve2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<StructureType const &,
               const void * const &,
               ImageSubresourceLayers const &,
               Offset3D const &,
               ImageSubresourceLayers const &,
               Offset3D const &,
               Extent3D const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcSubresource, srcOffset, dstSubresource, dstOffset, extent );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Memberwise comparison: defaulted three-way when available, otherwise hand-written == / !=.
    auto operator<=>( ImageResolve2 const & ) const = default;
#else
    bool operator==( ImageResolve2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) &&
             ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent );
#  endif
    }

    bool operator!=( ImageResolve2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct, in declaration order.
    StructureType          sType          = StructureType::eImageResolve2;
    const void *           pNext          = {};
    ImageSubresourceLayers srcSubresource = {};
    Offset3D               srcOffset      = {};
    ImageSubresourceLayers dstSubresource = {};
    Offset3D               dstOffset      = {};
    Extent3D               extent         = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct VkImageResolve2 to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkImageResolve2>
  {
    using Type = ImageResolve2;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type, enabling sType-based lookups (e.g. in structure chains).
  template <>
  struct CppType<StructureType, StructureType::eImageResolve2>
  {
    using Type = ImageResolve2;
  };

  // Backward-compatibility alias retaining the extension-suffixed (KHR) name.
  using ImageResolve2KHR = ImageResolve2;

  // wrapper struct for struct VkImageSparseMemoryRequirementsInfo2, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageSparseMemoryRequirementsInfo2.html
  // NOTE: member layout must remain identical to the C struct; the conversion operators below rely on reinterpret_cast.
  struct ImageSparseMemoryRequirementsInfo2
  {
    using NativeType = VkImageSparseMemoryRequirementsInfo2;

    // allowDuplicate == false: this struct may appear at most once in a pNext chain; structureType is the matching VkStructureType.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageSparseMemoryRequirementsInfo2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Memberwise constructor; sType is fixed by the member initializer below and never set here.
    VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( Image image_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , image{ image_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-compatible with it.
    ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageSparseMemoryRequirementsInfo2( *reinterpret_cast<ImageSparseMemoryRequirementsInfo2 const *>( &rhs ) )
    {
    }

    ImageSparseMemoryRequirementsInfo2 & operator=( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpret_cast).
    ImageSparseMemoryRequirementsInfo2 & operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageSparseMemoryRequirementsInfo2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chained builder-style initialization.
    VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 & setImage( Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const).
    operator VkImageSparseMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( this );
    }

    operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageSparseMemoryRequirementsInfo2 *>( this );
    }

    operator VkImageSparseMemoryRequirementsInfo2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( this );
    }

    operator VkImageSparseMemoryRequirementsInfo2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageSparseMemoryRequirementsInfo2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, Image const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, image );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Memberwise comparison: defaulted three-way when available, otherwise hand-written == / !=.
    auto operator<=>( ImageSparseMemoryRequirementsInfo2 const & ) const = default;
#else
    bool operator==( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image );
#  endif
    }

    bool operator!=( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct, in declaration order.
    StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2;
    const void *  pNext = {};
    Image         image = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct VkImageSparseMemoryRequirementsInfo2 to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkImageSparseMemoryRequirementsInfo2>
  {
    using Type = ImageSparseMemoryRequirementsInfo2;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type, enabling sType-based lookups (e.g. in structure chains).
  template <>
  struct CppType<StructureType, StructureType::eImageSparseMemoryRequirementsInfo2>
  {
    using Type = ImageSparseMemoryRequirementsInfo2;
  };

  // Backward-compatibility alias retaining the extension-suffixed (KHR) name.
  using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2;

  // wrapper struct for struct VkImageStencilUsageCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageStencilUsageCreateInfo.html
  // NOTE: member layout must remain identical to the C struct; the conversion operators below rely on reinterpret_cast.
  struct ImageStencilUsageCreateInfo
  {
    using NativeType = VkImageStencilUsageCreateInfo;

    // allowDuplicate == false: this struct may appear at most once in a pNext chain; structureType is the matching VkStructureType.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageStencilUsageCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Memberwise constructor; sType is fixed by the member initializer below and never set here.
    VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( ImageUsageFlags stencilUsage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stencilUsage{ stencilUsage_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-compatible with it.
    ImageStencilUsageCreateInfo( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageStencilUsageCreateInfo( *reinterpret_cast<ImageStencilUsageCreateInfo const *>( &rhs ) )
    {
    }

    ImageStencilUsageCreateInfo & operator=( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpret_cast).
    ImageStencilUsageCreateInfo & operator=( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageStencilUsageCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chained builder-style initialization.
    VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo & setStencilUsage( ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilUsage = stencilUsage_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const).
    operator VkImageStencilUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageStencilUsageCreateInfo *>( this );
    }

    operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageStencilUsageCreateInfo *>( this );
    }

    operator VkImageStencilUsageCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageStencilUsageCreateInfo *>( this );
    }

    operator VkImageStencilUsageCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageStencilUsageCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, ImageUsageFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stencilUsage );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Memberwise comparison: defaulted three-way when available, otherwise hand-written == / !=.
    auto operator<=>( ImageStencilUsageCreateInfo const & ) const = default;
#else
    bool operator==( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilUsage == rhs.stencilUsage );
#  endif
    }

    bool operator!=( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct, in declaration order.
    StructureType   sType        = StructureType::eImageStencilUsageCreateInfo;
    const void *    pNext        = {};
    ImageUsageFlags stencilUsage = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct VkImageStencilUsageCreateInfo to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkImageStencilUsageCreateInfo>
  {
    using Type = ImageStencilUsageCreateInfo;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type, enabling sType-based lookups (e.g. in structure chains).
  template <>
  struct CppType<StructureType, StructureType::eImageStencilUsageCreateInfo>
  {
    using Type = ImageStencilUsageCreateInfo;
  };

  // Backward-compatibility alias retaining the extension-suffixed (EXT) name.
  using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo;

  // wrapper struct for struct VkImageSwapchainCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageSwapchainCreateInfoKHR.html
  // NOTE: member layout must remain identical to the C struct; the conversion operators below rely on reinterpret_cast.
  struct ImageSwapchainCreateInfoKHR
  {
    using NativeType = VkImageSwapchainCreateInfoKHR;

    // allowDuplicate == false: this struct may appear at most once in a pNext chain; structureType is the matching VkStructureType.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageSwapchainCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Memberwise constructor; sType is fixed by the member initializer below and never set here.
    VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( SwapchainKHR swapchain_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , swapchain{ swapchain_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-compatible with it.
    ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageSwapchainCreateInfoKHR( *reinterpret_cast<ImageSwapchainCreateInfoKHR const *>( &rhs ) )
    {
    }

    ImageSwapchainCreateInfoKHR & operator=( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpret_cast).
    ImageSwapchainCreateInfoKHR & operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageSwapchainCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chained builder-style initialization.
    VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR & setSwapchain( SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchain = swapchain_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const).
    operator VkImageSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageSwapchainCreateInfoKHR *>( this );
    }

    operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageSwapchainCreateInfoKHR *>( this );
    }

    operator VkImageSwapchainCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageSwapchainCreateInfoKHR *>( this );
    }

    operator VkImageSwapchainCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageSwapchainCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, SwapchainKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, swapchain );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Memberwise comparison: defaulted three-way when available, otherwise hand-written == / !=.
    auto operator<=>( ImageSwapchainCreateInfoKHR const & ) const = default;
#else
    bool operator==( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain );
#  endif
    }

    bool operator!=( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct, in declaration order.
    StructureType sType     = StructureType::eImageSwapchainCreateInfoKHR;
    const void *  pNext     = {};
    SwapchainKHR  swapchain = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct VkImageSwapchainCreateInfoKHR to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkImageSwapchainCreateInfoKHR>
  {
    using Type = ImageSwapchainCreateInfoKHR;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type, enabling sType-based lookups (e.g. in structure chains).
  template <>
  struct CppType<StructureType, StructureType::eImageSwapchainCreateInfoKHR>
  {
    using Type = ImageSwapchainCreateInfoKHR;
  };

  // wrapper struct for struct VkImageViewASTCDecodeModeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewASTCDecodeModeEXT.html
  // NOTE: member layout must remain identical to the C struct; the conversion operators below rely on reinterpret_cast.
  struct ImageViewASTCDecodeModeEXT
  {
    using NativeType = VkImageViewASTCDecodeModeEXT;

    // allowDuplicate == false: this struct may appear at most once in a pNext chain; structureType is the matching VkStructureType.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageViewAstcDecodeModeEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Memberwise constructor; sType is fixed by the member initializer below and never set here.
    VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( Format decodeMode_ = Format::eUndefined, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , decodeMode{ decodeMode_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-compatible with it.
    ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageViewASTCDecodeModeEXT( *reinterpret_cast<ImageViewASTCDecodeModeEXT const *>( &rhs ) )
    {
    }

    ImageViewASTCDecodeModeEXT & operator=( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpret_cast).
    ImageViewASTCDecodeModeEXT & operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageViewASTCDecodeModeEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chained builder-style initialization.
    VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT & setDecodeMode( Format decodeMode_ ) VULKAN_HPP_NOEXCEPT
    {
      decodeMode = decodeMode_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const).
    operator VkImageViewASTCDecodeModeEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageViewASTCDecodeModeEXT *>( this );
    }

    operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageViewASTCDecodeModeEXT *>( this );
    }

    operator VkImageViewASTCDecodeModeEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageViewASTCDecodeModeEXT *>( this );
    }

    operator VkImageViewASTCDecodeModeEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageViewASTCDecodeModeEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, Format const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, decodeMode );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Memberwise comparison: defaulted three-way when available, otherwise hand-written == / !=.
    auto operator<=>( ImageViewASTCDecodeModeEXT const & ) const = default;
#else
    bool operator==( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decodeMode == rhs.decodeMode );
#  endif
    }

    bool operator!=( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct, in declaration order.
    StructureType sType      = StructureType::eImageViewAstcDecodeModeEXT;
    const void *  pNext      = {};
    Format        decodeMode = Format::eUndefined;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct VkImageViewASTCDecodeModeEXT to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkImageViewASTCDecodeModeEXT>
  {
    using Type = ImageViewASTCDecodeModeEXT;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type, enabling sType-based lookups (e.g. in structure chains).
  template <>
  struct CppType<StructureType, StructureType::eImageViewAstcDecodeModeEXT>
  {
    using Type = ImageViewASTCDecodeModeEXT;
  };

  // wrapper struct for struct VkImageViewAddressPropertiesNVX, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewAddressPropertiesNVX.html
  struct ImageViewAddressPropertiesNVX
  {
    using NativeType = VkImageViewAddressPropertiesNVX;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageViewAddressPropertiesNVX;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX( DeviceAddress deviceAddress_ = {}, DeviceSize size_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , deviceAddress{ deviceAddress_ }
      , size{ size_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageViewAddressPropertiesNVX( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageViewAddressPropertiesNVX( *reinterpret_cast<ImageViewAddressPropertiesNVX const *>( &rhs ) )
    {
    }

    ImageViewAddressPropertiesNVX & operator=( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    ImageViewAddressPropertiesNVX & operator=( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageViewAddressPropertiesNVX const *>( &rhs );
      return *this;
    }

    operator VkImageViewAddressPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageViewAddressPropertiesNVX *>( this );
    }

    operator VkImageViewAddressPropertiesNVX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageViewAddressPropertiesNVX *>( this );
    }

    operator VkImageViewAddressPropertiesNVX const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageViewAddressPropertiesNVX *>( this );
    }

    operator VkImageViewAddressPropertiesNVX *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageViewAddressPropertiesNVX *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references over all members (sType first), used for reflection-based comparison.
    std::tuple<StructureType const &, void * const &, DeviceAddress const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, deviceAddress, size );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageViewAddressPropertiesNVX const & ) const = default;
#else
    // Memberwise equality; note that pNext is compared as a raw pointer value — the
    // structure chain it may point to is not followed.
    bool operator==( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ) && ( size == rhs.size );
#  endif
    }

    bool operator!=( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType         = StructureType::eImageViewAddressPropertiesNVX;
    void *        pNext         = {};
    DeviceAddress deviceAddress = {};
    DeviceSize    size          = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to this wrapper (C++20 and newer only).
  template <>
  struct CppType<VkImageViewAddressPropertiesNVX>
  {
    using Type = ImageViewAddressPropertiesNVX;
  };
#endif

  // Maps the StructureType enumerant to this wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eImageViewAddressPropertiesNVX>
  {
    using Type = ImageViewAddressPropertiesNVX;
  };

  // wrapper struct for struct VkImageViewCaptureDescriptorDataInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewCaptureDescriptorDataInfoEXT.html
  struct ImageViewCaptureDescriptorDataInfoEXT
  {
    // The layout-compatible C type this struct wraps.
    using NativeType = VkImageViewCaptureDescriptorDataInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageViewCaptureDescriptorDataInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all members defaulted, pNext last so data members can be set positionally.
    VULKAN_HPP_CONSTEXPR ImageViewCaptureDescriptorDataInfoEXT( ImageView imageView_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , imageView{ imageView_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageViewCaptureDescriptorDataInfoEXT( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct (relies on layout compatibility with the wrapper).
    ImageViewCaptureDescriptorDataInfoEXT( VkImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageViewCaptureDescriptorDataInfoEXT( *reinterpret_cast<ImageViewCaptureDescriptorDataInfoEXT const *>( &rhs ) )
    {
    }

    ImageViewCaptureDescriptorDataInfoEXT & operator=( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; available even when wrapper constructors are disabled.
    ImageViewCaptureDescriptorDataInfoEXT & operator=( VkImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageViewCaptureDescriptorDataInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ImageViewCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageViewCaptureDescriptorDataInfoEXT & setImageView( ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
    {
      imageView = imageView_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type for direct use with the C API.
    operator VkImageViewCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( this );
    }

    operator VkImageViewCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageViewCaptureDescriptorDataInfoEXT *>( this );
    }

    operator VkImageViewCaptureDescriptorDataInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( this );
    }

    operator VkImageViewCaptureDescriptorDataInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageViewCaptureDescriptorDataInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references over all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, ImageView const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, imageView );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageViewCaptureDescriptorDataInfoEXT const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer (the chain is not followed).
    bool operator==( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView );
#  endif
    }

    bool operator!=( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly; do not reorder.
    StructureType sType     = StructureType::eImageViewCaptureDescriptorDataInfoEXT;
    const void *  pNext     = {};
    ImageView     imageView = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to this wrapper (C++20 and newer only).
  template <>
  struct CppType<VkImageViewCaptureDescriptorDataInfoEXT>
  {
    using Type = ImageViewCaptureDescriptorDataInfoEXT;
  };
#endif

  // Maps the StructureType enumerant to this wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eImageViewCaptureDescriptorDataInfoEXT>
  {
    using Type = ImageViewCaptureDescriptorDataInfoEXT;
  };

  // wrapper struct for struct VkImageViewCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewCreateInfo.html
  struct ImageViewCreateInfo
  {
    // The layout-compatible C type this struct wraps.
    using NativeType = VkImageViewCreateInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageViewCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all members defaulted, pNext last so data members can be set positionally.
    VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateFlags  flags_            = {},
                                              Image                 image_            = {},
                                              ImageViewType         viewType_         = ImageViewType::e1D,
                                              Format                format_           = Format::eUndefined,
                                              ComponentMapping      components_       = {},
                                              ImageSubresourceRange subresourceRange_ = {},
                                              const void *          pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , image{ image_ }
      , viewType{ viewType_ }
      , format{ format_ }
      , components{ components_ }
      , subresourceRange{ subresourceRange_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct (relies on layout compatibility with the wrapper).
    ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ImageViewCreateInfo( *reinterpret_cast<ImageViewCreateInfo const *>( &rhs ) )
    {
    }

    ImageViewCreateInfo & operator=( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; available even when wrapper constructors are disabled.
    ImageViewCreateInfo & operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageViewCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFlags( ImageViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setImage( Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setViewType( ImageViewType viewType_ ) VULKAN_HPP_NOEXCEPT
    {
      viewType = viewType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFormat( Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setComponents( ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
    {
      components = components_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setSubresourceRange( ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
    {
      subresourceRange = subresourceRange_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type for direct use with the C API.
    operator VkImageViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageViewCreateInfo *>( this );
    }

    operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageViewCreateInfo *>( this );
    }

    operator VkImageViewCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageViewCreateInfo *>( this );
    }

    operator VkImageViewCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageViewCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references over all members, used for reflection-based comparison.
    std::tuple<StructureType const &,
               const void * const &,
               ImageViewCreateFlags const &,
               Image const &,
               ImageViewType const &,
               Format const &,
               ComponentMapping const &,
               ImageSubresourceRange const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, image, viewType, format, components, subresourceRange );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageViewCreateInfo const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer (the chain is not followed).
    bool operator==( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( image == rhs.image ) && ( viewType == rhs.viewType ) &&
             ( format == rhs.format ) && ( components == rhs.components ) && ( subresourceRange == rhs.subresourceRange );
#  endif
    }

    bool operator!=( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly; do not reorder.
    StructureType         sType            = StructureType::eImageViewCreateInfo;
    const void *          pNext            = {};
    ImageViewCreateFlags  flags            = {};
    Image                 image            = {};
    ImageViewType         viewType         = ImageViewType::e1D;
    Format                format           = Format::eUndefined;
    ComponentMapping      components       = {};
    ImageSubresourceRange subresourceRange = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to this wrapper (C++20 and newer only).
  template <>
  struct CppType<VkImageViewCreateInfo>
  {
    using Type = ImageViewCreateInfo;
  };
#endif

  // Maps the StructureType enumerant to this wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eImageViewCreateInfo>
  {
    using Type = ImageViewCreateInfo;
  };

  // wrapper struct for struct VkImageViewHandleInfoNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewHandleInfoNVX.html
  struct ImageViewHandleInfoNVX
  {
    // The layout-compatible C type this struct wraps.
    using NativeType = VkImageViewHandleInfoNVX;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageViewHandleInfoNVX;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all members defaulted, pNext last so data members can be set positionally.
    VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( ImageView      imageView_      = {},
                                                 DescriptorType descriptorType_ = DescriptorType::eSampler,
                                                 Sampler        sampler_        = {},
                                                 const void *   pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , imageView{ imageView_ }
      , descriptorType{ descriptorType_ }
      , sampler{ sampler_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct (relies on layout compatibility with the wrapper).
    ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageViewHandleInfoNVX( *reinterpret_cast<ImageViewHandleInfoNVX const *>( &rhs ) )
    {
    }

    ImageViewHandleInfoNVX & operator=( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; available even when wrapper constructors are disabled.
    ImageViewHandleInfoNVX & operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageViewHandleInfoNVX const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setImageView( ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
    {
      imageView = imageView_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setDescriptorType( DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorType = descriptorType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setSampler( Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
    {
      sampler = sampler_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type for direct use with the C API.
    operator VkImageViewHandleInfoNVX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageViewHandleInfoNVX *>( this );
    }

    operator VkImageViewHandleInfoNVX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageViewHandleInfoNVX *>( this );
    }

    operator VkImageViewHandleInfoNVX const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageViewHandleInfoNVX *>( this );
    }

    operator VkImageViewHandleInfoNVX *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageViewHandleInfoNVX *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references over all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, ImageView const &, DescriptorType const &, Sampler const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, imageView, descriptorType, sampler );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageViewHandleInfoNVX const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer (the chain is not followed).
    bool operator==( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) && ( descriptorType == rhs.descriptorType ) &&
             ( sampler == rhs.sampler );
#  endif
    }

    bool operator!=( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly; do not reorder.
    StructureType  sType          = StructureType::eImageViewHandleInfoNVX;
    const void *   pNext          = {};
    ImageView      imageView      = {};
    DescriptorType descriptorType = DescriptorType::eSampler;
    Sampler        sampler        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to this wrapper (C++20 and newer only).
  template <>
  struct CppType<VkImageViewHandleInfoNVX>
  {
    using Type = ImageViewHandleInfoNVX;
  };
#endif

  // Maps the StructureType enumerant to this wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eImageViewHandleInfoNVX>
  {
    using Type = ImageViewHandleInfoNVX;
  };

  // wrapper struct for struct VkImageViewMinLodCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewMinLodCreateInfoEXT.html
  struct ImageViewMinLodCreateInfoEXT
  {
    // The layout-compatible C type this struct wraps.
    using NativeType = VkImageViewMinLodCreateInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageViewMinLodCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all members defaulted, pNext last so data members can be set positionally.
    VULKAN_HPP_CONSTEXPR ImageViewMinLodCreateInfoEXT( float minLod_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , minLod{ minLod_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageViewMinLodCreateInfoEXT( ImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct (relies on layout compatibility with the wrapper).
    ImageViewMinLodCreateInfoEXT( VkImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageViewMinLodCreateInfoEXT( *reinterpret_cast<ImageViewMinLodCreateInfoEXT const *>( &rhs ) )
    {
    }

    ImageViewMinLodCreateInfoEXT & operator=( ImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; available even when wrapper constructors are disabled.
    ImageViewMinLodCreateInfoEXT & operator=( VkImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageViewMinLodCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ImageViewMinLodCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageViewMinLodCreateInfoEXT & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT
    {
      minLod = minLod_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type for direct use with the C API.
    operator VkImageViewMinLodCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageViewMinLodCreateInfoEXT *>( this );
    }

    operator VkImageViewMinLodCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageViewMinLodCreateInfoEXT *>( this );
    }

    operator VkImageViewMinLodCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageViewMinLodCreateInfoEXT *>( this );
    }

    operator VkImageViewMinLodCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageViewMinLodCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references over all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, float const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, minLod );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageViewMinLodCreateInfoEXT const & ) const = default;
#else
    // Memberwise equality; note minLod is a float compared with ==, and pNext is compared
    // as a raw pointer (the chain is not followed).
    bool operator==( ImageViewMinLodCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minLod == rhs.minLod );
#  endif
    }

    bool operator!=( ImageViewMinLodCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly; do not reorder.
    StructureType sType  = StructureType::eImageViewMinLodCreateInfoEXT;
    const void *  pNext  = {};
    float         minLod = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to this wrapper (C++20 and newer only).
  template <>
  struct CppType<VkImageViewMinLodCreateInfoEXT>
  {
    using Type = ImageViewMinLodCreateInfoEXT;
  };
#endif

  // Maps the StructureType enumerant to this wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eImageViewMinLodCreateInfoEXT>
  {
    using Type = ImageViewMinLodCreateInfoEXT;
  };

  // wrapper struct for struct VkImageViewSampleWeightCreateInfoQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewSampleWeightCreateInfoQCOM.html
  struct ImageViewSampleWeightCreateInfoQCOM
  {
    // The layout-compatible C type this struct wraps.
    using NativeType = VkImageViewSampleWeightCreateInfoQCOM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageViewSampleWeightCreateInfoQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all members defaulted, pNext last so data members can be set positionally.
    VULKAN_HPP_CONSTEXPR ImageViewSampleWeightCreateInfoQCOM( Offset2D     filterCenter_ = {},
                                                              Extent2D     filterSize_   = {},
                                                              uint32_t     numPhases_    = {},
                                                              const void * pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , filterCenter{ filterCenter_ }
      , filterSize{ filterSize_ }
      , numPhases{ numPhases_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageViewSampleWeightCreateInfoQCOM( ImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct (relies on layout compatibility with the wrapper).
    ImageViewSampleWeightCreateInfoQCOM( VkImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageViewSampleWeightCreateInfoQCOM( *reinterpret_cast<ImageViewSampleWeightCreateInfoQCOM const *>( &rhs ) )
    {
    }

    ImageViewSampleWeightCreateInfoQCOM & operator=( ImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; available even when wrapper constructors are disabled.
    ImageViewSampleWeightCreateInfoQCOM & operator=( VkImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageViewSampleWeightCreateInfoQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setFilterCenter( Offset2D const & filterCenter_ ) VULKAN_HPP_NOEXCEPT
    {
      filterCenter = filterCenter_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setFilterSize( Extent2D const & filterSize_ ) VULKAN_HPP_NOEXCEPT
    {
      filterSize = filterSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setNumPhases( uint32_t numPhases_ ) VULKAN_HPP_NOEXCEPT
    {
      numPhases = numPhases_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type for direct use with the C API.
    operator VkImageViewSampleWeightCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageViewSampleWeightCreateInfoQCOM *>( this );
    }

    operator VkImageViewSampleWeightCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageViewSampleWeightCreateInfoQCOM *>( this );
    }

    operator VkImageViewSampleWeightCreateInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageViewSampleWeightCreateInfoQCOM *>( this );
    }

    operator VkImageViewSampleWeightCreateInfoQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageViewSampleWeightCreateInfoQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references over all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, Offset2D const &, Extent2D const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, filterCenter, filterSize, numPhases );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageViewSampleWeightCreateInfoQCOM const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer (the chain is not followed).
    bool operator==( ImageViewSampleWeightCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterCenter == rhs.filterCenter ) && ( filterSize == rhs.filterSize ) &&
             ( numPhases == rhs.numPhases );
#  endif
    }

    bool operator!=( ImageViewSampleWeightCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly; do not reorder.
    StructureType sType        = StructureType::eImageViewSampleWeightCreateInfoQCOM;
    const void *  pNext        = {};
    Offset2D      filterCenter = {};
    Extent2D      filterSize   = {};
    uint32_t      numPhases    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to this wrapper (C++20 and newer only).
  template <>
  struct CppType<VkImageViewSampleWeightCreateInfoQCOM>
  {
    using Type = ImageViewSampleWeightCreateInfoQCOM;
  };
#endif

  // Maps the StructureType enumerant to this wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eImageViewSampleWeightCreateInfoQCOM>
  {
    using Type = ImageViewSampleWeightCreateInfoQCOM;
  };

  // wrapper struct for struct VkImageViewSlicedCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewSlicedCreateInfoEXT.html
  struct ImageViewSlicedCreateInfoEXT
  {
    // The layout-compatible C type this struct wraps.
    using NativeType = VkImageViewSlicedCreateInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageViewSlicedCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all members defaulted, pNext last so data members can be set positionally.
    VULKAN_HPP_CONSTEXPR
      ImageViewSlicedCreateInfoEXT( uint32_t sliceOffset_ = {}, uint32_t sliceCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , sliceOffset{ sliceOffset_ }
      , sliceCount{ sliceCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageViewSlicedCreateInfoEXT( ImageViewSlicedCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct (relies on layout compatibility with the wrapper).
    ImageViewSlicedCreateInfoEXT( VkImageViewSlicedCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageViewSlicedCreateInfoEXT( *reinterpret_cast<ImageViewSlicedCreateInfoEXT const *>( &rhs ) )
    {
    }

    ImageViewSlicedCreateInfoEXT & operator=( ImageViewSlicedCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; available even when wrapper constructors are disabled.
    ImageViewSlicedCreateInfoEXT & operator=( VkImageViewSlicedCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageViewSlicedCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ImageViewSlicedCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageViewSlicedCreateInfoEXT & setSliceOffset( uint32_t sliceOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      sliceOffset = sliceOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageViewSlicedCreateInfoEXT & setSliceCount( uint32_t sliceCount_ ) VULKAN_HPP_NOEXCEPT
    {
      sliceCount = sliceCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type for direct use with the C API.
    operator VkImageViewSlicedCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageViewSlicedCreateInfoEXT *>( this );
    }

    operator VkImageViewSlicedCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageViewSlicedCreateInfoEXT *>( this );
    }

    operator VkImageViewSlicedCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageViewSlicedCreateInfoEXT *>( this );
    }

    operator VkImageViewSlicedCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageViewSlicedCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references over all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, sliceOffset, sliceCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageViewSlicedCreateInfoEXT const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer (the chain is not followed).
    bool operator==( ImageViewSlicedCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sliceOffset == rhs.sliceOffset ) && ( sliceCount == rhs.sliceCount );
#  endif
    }

    bool operator!=( ImageViewSlicedCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly; do not reorder.
    StructureType sType       = StructureType::eImageViewSlicedCreateInfoEXT;
    const void *  pNext       = {};
    uint32_t      sliceOffset = {};
    uint32_t      sliceCount  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to this wrapper (C++20 and newer only).
  template <>
  struct CppType<VkImageViewSlicedCreateInfoEXT>
  {
    using Type = ImageViewSlicedCreateInfoEXT;
  };
#endif

  // Maps the StructureType enumerant to this wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eImageViewSlicedCreateInfoEXT>
  {
    using Type = ImageViewSlicedCreateInfoEXT;
  };

  // wrapper struct for struct VkImageViewUsageCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewUsageCreateInfo.html
  struct ImageViewUsageCreateInfo
  {
    // The layout-compatible C type this struct wraps.
    using NativeType = VkImageViewUsageCreateInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageViewUsageCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all members defaulted, pNext last so data members can be set positionally.
    VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( ImageUsageFlags usage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , usage{ usage_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct (relies on layout compatibility with the wrapper).
    ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageViewUsageCreateInfo( *reinterpret_cast<ImageViewUsageCreateInfo const *>( &rhs ) )
    {
    }

    ImageViewUsageCreateInfo & operator=( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; available even when wrapper constructors are disabled.
    ImageViewUsageCreateInfo & operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImageViewUsageCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo & setUsage( ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type for direct use with the C API.
    operator VkImageViewUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageViewUsageCreateInfo *>( this );
    }

    operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageViewUsageCreateInfo *>( this );
    }

    operator VkImageViewUsageCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImageViewUsageCreateInfo *>( this );
    }

    operator VkImageViewUsageCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImageViewUsageCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references over all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, ImageUsageFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, usage );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageViewUsageCreateInfo const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer (the chain is not followed).
    bool operator==( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage );
#  endif
    }

    bool operator!=( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct exactly; do not reorder.
    StructureType   sType = StructureType::eImageViewUsageCreateInfo;
    const void *    pNext = {};
    ImageUsageFlags usage = {};
  };

  // Map the native C type to its C++ wrapper type (only emitted for C++20 builds).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkImageViewUsageCreateInfo>
  {
    using Type = ImageViewUsageCreateInfo;
  };
#endif

  // Map the sType enumerant to the wrapper type, for StructureType-based lookups.
  template <>
  struct CppType<StructureType, StructureType::eImageViewUsageCreateInfo>
  {
    using Type = ImageViewUsageCreateInfo;
  };

  // Alias kept for code written against the original KHR extension name of this struct.
  using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo;

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  // wrapper struct for struct VkImportAndroidHardwareBufferInfoANDROID, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportAndroidHardwareBufferInfoANDROID.html
  struct ImportAndroidHardwareBufferInfoANDROID
  {
    using NativeType = VkImportAndroidHardwareBufferInfoANDROID;

    // pNext-chain metadata: the fixed sType for this struct, and whether it may appear more
    // than once in a structure chain (here it may not).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImportAndroidHardwareBufferInfoANDROID;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer and is not a parameter.
    VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer * buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , buffer{ buffer_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-compatible with
    // VkImportAndroidHardwareBufferInfoANDROID.
    ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportAndroidHardwareBufferInfoANDROID( *reinterpret_cast<ImportAndroidHardwareBufferInfoANDROID const *>( &rhs ) )
    {
    }

    ImportAndroidHardwareBufferInfoANDROID & operator=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpretation as above).
    ImportAndroidHardwareBufferInfoANDROID & operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImportAndroidHardwareBufferInfoANDROID const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID & setBuffer( struct AHardwareBuffer * buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (by reference and by pointer), so the wrapper
    // can be handed directly to the Vulkan C API.
    operator VkImportAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportAndroidHardwareBufferInfoANDROID *>( this );
    }

    operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportAndroidHardwareBufferInfoANDROID *>( this );
    }

    operator VkImportAndroidHardwareBufferInfoANDROID const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImportAndroidHardwareBufferInfoANDROID *>( this );
    }

    operator VkImportAndroidHardwareBufferInfoANDROID *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImportAndroidHardwareBufferInfoANDROID *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: a tuple of references to every member, in declaration order.
    std::tuple<StructureType const &, const void * const &, struct AHardwareBuffer * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, buffer );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImportAndroidHardwareBufferInfoANDROID const & ) const = default;
#  else
    // Member-wise equality; pNext and buffer are compared as raw pointers, not deep-compared.
    bool operator==( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer );
#    endif
    }

    bool operator!=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Member order mirrors the native C struct; do not reorder.
    StructureType            sType  = StructureType::eImportAndroidHardwareBufferInfoANDROID;
    const void *             pNext  = {};
    struct AHardwareBuffer * buffer = {};
  };

  // Map the native C type to its C++ wrapper type (only emitted for C++20 builds).
#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkImportAndroidHardwareBufferInfoANDROID>
  {
    using Type = ImportAndroidHardwareBufferInfoANDROID;
  };
#  endif

  // Map the sType enumerant to the wrapper type, for StructureType-based lookups.
  template <>
  struct CppType<StructureType, StructureType::eImportAndroidHardwareBufferInfoANDROID>
  {
    using Type = ImportAndroidHardwareBufferInfoANDROID;
  };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/

  // wrapper struct for struct VkImportFenceFdInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportFenceFdInfoKHR.html
  struct ImportFenceFdInfoKHR
  {
    using NativeType = VkImportFenceFdInfoKHR;

    // pNext-chain metadata: the fixed sType for this struct, and whether it may appear more
    // than once in a structure chain (here it may not).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImportFenceFdInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer and is not a parameter.
    // handleType defaults to the first enumerant, eOpaqueFd.
    VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( Fence                           fence_      = {},
                                               FenceImportFlags                flags_      = {},
                                               ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd,
                                               int                             fd_         = {},
                                               const void *                    pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , fence{ fence_ }
      , flags{ flags_ }
      , handleType{ handleType_ }
      , fd{ fd_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-compatible with
    // VkImportFenceFdInfoKHR.
    ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportFenceFdInfoKHR( *reinterpret_cast<ImportFenceFdInfoKHR const *>( &rhs ) )
    {
    }

    ImportFenceFdInfoKHR & operator=( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpretation as above).
    ImportFenceFdInfoKHR & operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImportFenceFdInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFence( Fence fence_ ) VULKAN_HPP_NOEXCEPT
    {
      fence = fence_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFlags( FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setHandleType( ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
    {
      fd = fd_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (by reference and by pointer), so the wrapper
    // can be handed directly to the Vulkan C API.
    operator VkImportFenceFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportFenceFdInfoKHR *>( this );
    }

    operator VkImportFenceFdInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportFenceFdInfoKHR *>( this );
    }

    operator VkImportFenceFdInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImportFenceFdInfoKHR *>( this );
    }

    operator VkImportFenceFdInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImportFenceFdInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: a tuple of references to every member, in declaration order.
    std::tuple<StructureType const &, const void * const &, Fence const &, FenceImportFlags const &, ExternalFenceHandleTypeFlagBits const &, int const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, fence, flags, handleType, fd );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImportFenceFdInfoKHR const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer, not deep-compared.
    bool operator==( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) &&
             ( fd == rhs.fd );
#  endif
    }

    bool operator!=( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the native C struct; do not reorder.
    StructureType                   sType      = StructureType::eImportFenceFdInfoKHR;
    const void *                    pNext      = {};
    Fence                           fence      = {};
    FenceImportFlags                flags      = {};
    ExternalFenceHandleTypeFlagBits handleType = ExternalFenceHandleTypeFlagBits::eOpaqueFd;
    int                             fd         = {};
  };

  // Map the native C type to its C++ wrapper type (only emitted for C++20 builds).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkImportFenceFdInfoKHR>
  {
    using Type = ImportFenceFdInfoKHR;
  };
#endif

  // Map the sType enumerant to the wrapper type, for StructureType-based lookups.
  template <>
  struct CppType<StructureType, StructureType::eImportFenceFdInfoKHR>
  {
    using Type = ImportFenceFdInfoKHR;
  };

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  // wrapper struct for struct VkImportFenceWin32HandleInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportFenceWin32HandleInfoKHR.html
  struct ImportFenceWin32HandleInfoKHR
  {
    using NativeType = VkImportFenceWin32HandleInfoKHR;

    // pNext-chain metadata: the fixed sType for this struct, and whether it may appear more
    // than once in a structure chain (here it may not).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImportFenceWin32HandleInfoKHR;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer and is not a parameter.
    // Note: handleType defaults to the first enumerant (eOpaqueFd), which is not a Win32 handle
    // type — callers importing a Win32 handle are expected to set an appropriate value.
    VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( Fence                           fence_      = {},
                                                        FenceImportFlags                flags_      = {},
                                                        ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd,
                                                        HANDLE                          handle_     = {},
                                                        LPCWSTR                         name_       = {},
                                                        const void *                    pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , fence{ fence_ }
      , flags{ flags_ }
      , handleType{ handleType_ }
      , handle{ handle_ }
      , name{ name_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-compatible with
    // VkImportFenceWin32HandleInfoKHR.
    ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportFenceWin32HandleInfoKHR( *reinterpret_cast<ImportFenceWin32HandleInfoKHR const *>( &rhs ) )
    {
    }

    ImportFenceWin32HandleInfoKHR & operator=( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpretation as above).
    ImportFenceWin32HandleInfoKHR & operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImportFenceWin32HandleInfoKHR const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setFence( Fence fence_ ) VULKAN_HPP_NOEXCEPT
    {
      fence = fence_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setFlags( FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setHandleType( ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
    {
      handle = handle_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
    {
      name = name_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (by reference and by pointer), so the wrapper
    // can be handed directly to the Vulkan C API.
    operator VkImportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( this );
    }

    operator VkImportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportFenceWin32HandleInfoKHR *>( this );
    }

    operator VkImportFenceWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( this );
    }

    operator VkImportFenceWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImportFenceWin32HandleInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: a tuple of references to every member, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               Fence const &,
               FenceImportFlags const &,
               ExternalFenceHandleTypeFlagBits const &,
               HANDLE const &,
               LPCWSTR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, fence, flags, handleType, handle, name );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImportFenceWin32HandleInfoKHR const & ) const = default;
#  else
    // Member-wise equality; pNext, handle, and name are compared as raw pointers — in
    // particular, name is NOT compared by wide-string content.
    bool operator==( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) &&
             ( handle == rhs.handle ) && ( name == rhs.name );
#    endif
    }

    bool operator!=( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Member order mirrors the native C struct; do not reorder.
    StructureType                   sType      = StructureType::eImportFenceWin32HandleInfoKHR;
    const void *                    pNext      = {};
    Fence                           fence      = {};
    FenceImportFlags                flags      = {};
    ExternalFenceHandleTypeFlagBits handleType = ExternalFenceHandleTypeFlagBits::eOpaqueFd;
    HANDLE                          handle     = {};
    LPCWSTR                         name       = {};
  };

  // Map the native C type to its C++ wrapper type (only emitted for C++20 builds).
#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkImportFenceWin32HandleInfoKHR>
  {
    using Type = ImportFenceWin32HandleInfoKHR;
  };
#  endif

  // Map the sType enumerant to the wrapper type, for StructureType-based lookups.
  template <>
  struct CppType<StructureType, StructureType::eImportFenceWin32HandleInfoKHR>
  {
    using Type = ImportFenceWin32HandleInfoKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

#if defined( VK_USE_PLATFORM_FUCHSIA )
  // wrapper struct for struct VkImportMemoryBufferCollectionFUCHSIA, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryBufferCollectionFUCHSIA.html
  struct ImportMemoryBufferCollectionFUCHSIA
  {
    using NativeType = VkImportMemoryBufferCollectionFUCHSIA;

    // pNext-chain metadata: the fixed sType for this struct, and whether it may appear more
    // than once in a structure chain (here it may not).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImportMemoryBufferCollectionFUCHSIA;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer and is not a parameter.
    VULKAN_HPP_CONSTEXPR
      ImportMemoryBufferCollectionFUCHSIA( BufferCollectionFUCHSIA collection_ = {}, uint32_t index_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , collection{ collection_ }
      , index{ index_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImportMemoryBufferCollectionFUCHSIA( ImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-compatible with
    // VkImportMemoryBufferCollectionFUCHSIA.
    ImportMemoryBufferCollectionFUCHSIA( VkImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportMemoryBufferCollectionFUCHSIA( *reinterpret_cast<ImportMemoryBufferCollectionFUCHSIA const *>( &rhs ) )
    {
    }

    ImportMemoryBufferCollectionFUCHSIA & operator=( ImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpretation as above).
    ImportMemoryBufferCollectionFUCHSIA & operator=( VkImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImportMemoryBufferCollectionFUCHSIA const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setCollection( BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT
    {
      collection = collection_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT
    {
      index = index_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (by reference and by pointer), so the wrapper
    // can be handed directly to the Vulkan C API.
    operator VkImportMemoryBufferCollectionFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportMemoryBufferCollectionFUCHSIA *>( this );
    }

    operator VkImportMemoryBufferCollectionFUCHSIA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportMemoryBufferCollectionFUCHSIA *>( this );
    }

    operator VkImportMemoryBufferCollectionFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImportMemoryBufferCollectionFUCHSIA *>( this );
    }

    operator VkImportMemoryBufferCollectionFUCHSIA *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImportMemoryBufferCollectionFUCHSIA *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: a tuple of references to every member, in declaration order.
    std::tuple<StructureType const &, const void * const &, BufferCollectionFUCHSIA const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, collection, index );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImportMemoryBufferCollectionFUCHSIA const & ) const = default;
#  else
    // Member-wise equality; pNext is compared as a raw pointer, not deep-compared.
    bool operator==( ImportMemoryBufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( collection == rhs.collection ) && ( index == rhs.index );
#    endif
    }

    bool operator!=( ImportMemoryBufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Member order mirrors the native C struct; do not reorder.
    StructureType           sType      = StructureType::eImportMemoryBufferCollectionFUCHSIA;
    const void *            pNext      = {};
    BufferCollectionFUCHSIA collection = {};
    uint32_t                index      = {};
  };

  // Map the native C type to its C++ wrapper type (only emitted for C++20 builds).
#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkImportMemoryBufferCollectionFUCHSIA>
  {
    using Type = ImportMemoryBufferCollectionFUCHSIA;
  };
#  endif

  // Map the sType enumerant to the wrapper type, for StructureType-based lookups.
  template <>
  struct CppType<StructureType, StructureType::eImportMemoryBufferCollectionFUCHSIA>
  {
    using Type = ImportMemoryBufferCollectionFUCHSIA;
  };
#endif /*VK_USE_PLATFORM_FUCHSIA*/

  // wrapper struct for struct VkImportMemoryFdInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryFdInfoKHR.html
  struct ImportMemoryFdInfoKHR
  {
    using NativeType = VkImportMemoryFdInfoKHR;

    // pNext-chain metadata: the fixed sType for this struct, and whether it may appear more
    // than once in a structure chain (here it may not).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImportMemoryFdInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer and is not a parameter.
    // handleType defaults to the first enumerant, eOpaqueFd.
    VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
                                                int                              fd_         = {},
                                                const void *                     pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , handleType{ handleType_ }
      , fd{ fd_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-compatible with
    // VkImportMemoryFdInfoKHR.
    ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportMemoryFdInfoKHR( *reinterpret_cast<ImportMemoryFdInfoKHR const *>( &rhs ) )
    {
    }

    ImportMemoryFdInfoKHR & operator=( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpretation as above).
    ImportMemoryFdInfoKHR & operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImportMemoryFdInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
    {
      fd = fd_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (by reference and by pointer), so the wrapper
    // can be handed directly to the Vulkan C API.
    operator VkImportMemoryFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportMemoryFdInfoKHR *>( this );
    }

    operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportMemoryFdInfoKHR *>( this );
    }

    operator VkImportMemoryFdInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImportMemoryFdInfoKHR *>( this );
    }

    operator VkImportMemoryFdInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImportMemoryFdInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: a tuple of references to every member, in declaration order.
    std::tuple<StructureType const &, const void * const &, ExternalMemoryHandleTypeFlagBits const &, int const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, handleType, fd );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImportMemoryFdInfoKHR const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer, not deep-compared.
    bool operator==( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( fd == rhs.fd );
#  endif
    }

    bool operator!=( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the native C struct; do not reorder.
    StructureType                    sType      = StructureType::eImportMemoryFdInfoKHR;
    const void *                     pNext      = {};
    ExternalMemoryHandleTypeFlagBits handleType = ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
    int                              fd         = {};
  };

  // Map the native C type to its C++ wrapper type (only emitted for C++20 builds).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkImportMemoryFdInfoKHR>
  {
    using Type = ImportMemoryFdInfoKHR;
  };
#endif

  // Map the sType enumerant to the wrapper type, for StructureType-based lookups.
  template <>
  struct CppType<StructureType, StructureType::eImportMemoryFdInfoKHR>
  {
    using Type = ImportMemoryFdInfoKHR;
  };

  // wrapper struct for struct VkImportMemoryHostPointerInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryHostPointerInfoEXT.html
  struct ImportMemoryHostPointerInfoEXT
  {
    using NativeType = VkImportMemoryHostPointerInfoEXT;

    // pNext-chain metadata: the fixed sType for this struct, and whether it may appear more
    // than once in a structure chain (here it may not).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImportMemoryHostPointerInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer and is not a parameter.
    // handleType defaults to the first enumerant, eOpaqueFd.
    VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( ExternalMemoryHandleTypeFlagBits handleType_   = ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
                                                         void *                           pHostPointer_ = {},
                                                         const void *                     pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , handleType{ handleType_ }
      , pHostPointer{ pHostPointer_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-compatible with
    // VkImportMemoryHostPointerInfoEXT.
    ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportMemoryHostPointerInfoEXT( *reinterpret_cast<ImportMemoryHostPointerInfoEXT const *>( &rhs ) )
    {
    }

    ImportMemoryHostPointerInfoEXT & operator=( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpretation as above).
    ImportMemoryHostPointerInfoEXT & operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImportMemoryHostPointerInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setPHostPointer( void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT
    {
      pHostPointer = pHostPointer_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (by reference and by pointer), so the wrapper
    // can be handed directly to the Vulkan C API.
    operator VkImportMemoryHostPointerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportMemoryHostPointerInfoEXT *>( this );
    }

    operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportMemoryHostPointerInfoEXT *>( this );
    }

    operator VkImportMemoryHostPointerInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImportMemoryHostPointerInfoEXT *>( this );
    }

    operator VkImportMemoryHostPointerInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImportMemoryHostPointerInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: a tuple of references to every member, in declaration order.
    std::tuple<StructureType const &, const void * const &, ExternalMemoryHandleTypeFlagBits const &, void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, handleType, pHostPointer );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImportMemoryHostPointerInfoEXT const & ) const = default;
#else
    // Member-wise equality; pNext and pHostPointer are compared as raw pointers.
    bool operator==( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( pHostPointer == rhs.pHostPointer );
#  endif
    }

    bool operator!=( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the native C struct; do not reorder.
    StructureType                    sType        = StructureType::eImportMemoryHostPointerInfoEXT;
    const void *                     pNext        = {};
    ExternalMemoryHandleTypeFlagBits handleType   = ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
    void *                           pHostPointer = {};
  };

  // Map the native C type to its C++ wrapper type (only emitted for C++20 builds).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkImportMemoryHostPointerInfoEXT>
  {
    using Type = ImportMemoryHostPointerInfoEXT;
  };
#endif

  // Map the sType enumerant to the wrapper type, for StructureType-based lookups.
  template <>
  struct CppType<StructureType, StructureType::eImportMemoryHostPointerInfoEXT>
  {
    using Type = ImportMemoryHostPointerInfoEXT;
  };

#if defined( VK_USE_PLATFORM_METAL_EXT )
  // wrapper struct for struct VkImportMemoryMetalHandleInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryMetalHandleInfoEXT.html
  struct ImportMemoryMetalHandleInfoEXT
  {
    using NativeType = VkImportMemoryMetalHandleInfoEXT;

    // Chain metadata: the StructureType tag for this struct, which may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImportMemoryMetalHandleInfoEXT;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Constructors and copy assignment (omitted entirely when VULKAN_HPP_NO_STRUCT_CONSTRUCTORS is set).
    VULKAN_HPP_CONSTEXPR ImportMemoryMetalHandleInfoEXT( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
                                                         void *                           handle_     = {},
                                                         const void *                     pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , handleType{ handleType_ }
      , handle{ handle_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImportMemoryMetalHandleInfoEXT( ImportMemoryMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct; the reinterpret_cast relies on the identical member layout.
    ImportMemoryMetalHandleInfoEXT( VkImportMemoryMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportMemoryMetalHandleInfoEXT( *reinterpret_cast<ImportMemoryMetalHandleInfoEXT const *>( &rhs ) )
    {
    }

    ImportMemoryMetalHandleInfoEXT & operator=( ImportMemoryMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (layout-compatible reinterpret_cast).
    ImportMemoryMetalHandleInfoEXT & operator=( VkImportMemoryMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImportMemoryMetalHandleInfoEXT const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters returning *this for chaining (omitted when VULKAN_HPP_NO_STRUCT_SETTERS is set).
    VULKAN_HPP_CONSTEXPR_14 ImportMemoryMetalHandleInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportMemoryMetalHandleInfoEXT & setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportMemoryMetalHandleInfoEXT & setHandle( void * handle_ ) VULKAN_HPP_NOEXCEPT
    {
      handle = handle_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer, in const and non-const flavors.
    operator VkImportMemoryMetalHandleInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportMemoryMetalHandleInfoEXT *>( this );
    }

    operator VkImportMemoryMetalHandleInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportMemoryMetalHandleInfoEXT *>( this );
    }

    operator VkImportMemoryMetalHandleInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImportMemoryMetalHandleInfoEXT *>( this );
    }

    operator VkImportMemoryMetalHandleInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImportMemoryMetalHandleInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: all members as a tuple of const references.
    std::tuple<StructureType const &, const void * const &, ExternalMemoryHandleTypeFlagBits const &, void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, handleType, handle );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted member-wise comparisons (also provides operator==).
    auto operator<=>( ImportMemoryMetalHandleInfoEXT const & ) const = default;
#  else
    bool operator==( ImportMemoryMetalHandleInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle );
#    endif
    }

    bool operator!=( ImportMemoryMetalHandleInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror VkImportMemoryMetalHandleInfoEXT in order and type.
    StructureType                    sType      = StructureType::eImportMemoryMetalHandleInfoEXT;
    const void *                     pNext      = {};
    ExternalMemoryHandleTypeFlagBits handleType = ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
    void *                           handle     = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkImportMemoryMetalHandleInfoEXT>
  {
    using Type = ImportMemoryMetalHandleInfoEXT;
  };
#  endif

  // Maps the StructureType enum value to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eImportMemoryMetalHandleInfoEXT>
  {
    using Type = ImportMemoryMetalHandleInfoEXT;
  };
#endif /*VK_USE_PLATFORM_METAL_EXT*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  // wrapper struct for struct VkImportMemoryWin32HandleInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryWin32HandleInfoKHR.html
  struct ImportMemoryWin32HandleInfoKHR
  {
    using NativeType = VkImportMemoryWin32HandleInfoKHR;

    // Chain metadata: the StructureType tag for this struct, which may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImportMemoryWin32HandleInfoKHR;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Constructors and copy assignment (omitted entirely when VULKAN_HPP_NO_STRUCT_CONSTRUCTORS is set).
    // Memory may be identified either by Win32 handle_ or by name_ — see the spec link above for the valid combinations.
    VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
                                                         HANDLE                           handle_     = {},
                                                         LPCWSTR                          name_       = {},
                                                         const void *                     pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , handleType{ handleType_ }
      , handle{ handle_ }
      , name{ name_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct; the reinterpret_cast relies on the identical member layout.
    ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportMemoryWin32HandleInfoKHR( *reinterpret_cast<ImportMemoryWin32HandleInfoKHR const *>( &rhs ) )
    {
    }

    ImportMemoryWin32HandleInfoKHR & operator=( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (layout-compatible reinterpret_cast).
    ImportMemoryWin32HandleInfoKHR & operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImportMemoryWin32HandleInfoKHR const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters returning *this for chaining (omitted when VULKAN_HPP_NO_STRUCT_SETTERS is set).
    VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
    {
      handle = handle_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
    {
      name = name_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer, in const and non-const flavors.
    operator VkImportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportMemoryWin32HandleInfoKHR *>( this );
    }

    operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportMemoryWin32HandleInfoKHR *>( this );
    }

    operator VkImportMemoryWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImportMemoryWin32HandleInfoKHR *>( this );
    }

    operator VkImportMemoryWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImportMemoryWin32HandleInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: all members as a tuple of const references.
    std::tuple<StructureType const &, const void * const &, ExternalMemoryHandleTypeFlagBits const &, HANDLE const &, LPCWSTR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, handleType, handle, name );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted member-wise comparisons (also provides operator==).
    auto operator<=>( ImportMemoryWin32HandleInfoKHR const & ) const = default;
#  else
    // NOTE: name is compared as a pointer, not by wide-string content — same semantics as the defaulted comparison.
    bool operator==( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle ) && ( name == rhs.name );
#    endif
    }

    bool operator!=( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror VkImportMemoryWin32HandleInfoKHR in order and type.
    StructureType                    sType      = StructureType::eImportMemoryWin32HandleInfoKHR;
    const void *                     pNext      = {};
    ExternalMemoryHandleTypeFlagBits handleType = ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
    HANDLE                           handle     = {};
    LPCWSTR                          name       = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkImportMemoryWin32HandleInfoKHR>
  {
    using Type = ImportMemoryWin32HandleInfoKHR;
  };
#  endif

  // Maps the StructureType enum value to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eImportMemoryWin32HandleInfoKHR>
  {
    using Type = ImportMemoryWin32HandleInfoKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  // wrapper struct for struct VkImportMemoryWin32HandleInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryWin32HandleInfoNV.html
  struct ImportMemoryWin32HandleInfoNV
  {
    using NativeType = VkImportMemoryWin32HandleInfoNV;

    // Chain metadata: the StructureType tag for this struct, which may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImportMemoryWin32HandleInfoNV;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Constructors and copy assignment (omitted entirely when VULKAN_HPP_NO_STRUCT_CONSTRUCTORS is set).
    // NV variant: handleType is a flags type here, unlike the KHR variant which uses a single flag bit.
    VULKAN_HPP_CONSTEXPR
      ImportMemoryWin32HandleInfoNV( ExternalMemoryHandleTypeFlagsNV handleType_ = {}, HANDLE handle_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , handleType{ handleType_ }
      , handle{ handle_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct; the reinterpret_cast relies on the identical member layout.
    ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportMemoryWin32HandleInfoNV( *reinterpret_cast<ImportMemoryWin32HandleInfoNV const *>( &rhs ) )
    {
    }

    ImportMemoryWin32HandleInfoNV & operator=( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (layout-compatible reinterpret_cast).
    ImportMemoryWin32HandleInfoNV & operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImportMemoryWin32HandleInfoNV const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters returning *this for chaining (omitted when VULKAN_HPP_NO_STRUCT_SETTERS is set).
    VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setHandleType( ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
    {
      handle = handle_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer, in const and non-const flavors.
    operator VkImportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportMemoryWin32HandleInfoNV *>( this );
    }

    operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportMemoryWin32HandleInfoNV *>( this );
    }

    operator VkImportMemoryWin32HandleInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImportMemoryWin32HandleInfoNV *>( this );
    }

    operator VkImportMemoryWin32HandleInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImportMemoryWin32HandleInfoNV *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: all members as a tuple of const references.
    std::tuple<StructureType const &, const void * const &, ExternalMemoryHandleTypeFlagsNV const &, HANDLE const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, handleType, handle );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted member-wise comparisons (also provides operator==).
    auto operator<=>( ImportMemoryWin32HandleInfoNV const & ) const = default;
#  else
    bool operator==( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle );
#    endif
    }

    bool operator!=( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror VkImportMemoryWin32HandleInfoNV in order and type.
    StructureType                   sType      = StructureType::eImportMemoryWin32HandleInfoNV;
    const void *                    pNext      = {};
    ExternalMemoryHandleTypeFlagsNV handleType = {};
    HANDLE                          handle     = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkImportMemoryWin32HandleInfoNV>
  {
    using Type = ImportMemoryWin32HandleInfoNV;
  };
#  endif

  // Maps the StructureType enum value to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eImportMemoryWin32HandleInfoNV>
  {
    using Type = ImportMemoryWin32HandleInfoNV;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

#if defined( VK_USE_PLATFORM_FUCHSIA )
  // wrapper struct for struct VkImportMemoryZirconHandleInfoFUCHSIA, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryZirconHandleInfoFUCHSIA.html
  struct ImportMemoryZirconHandleInfoFUCHSIA
  {
    using NativeType = VkImportMemoryZirconHandleInfoFUCHSIA;

    // Chain metadata: the StructureType tag for this struct, which may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImportMemoryZirconHandleInfoFUCHSIA;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Constructors and copy assignment (omitted entirely when VULKAN_HPP_NO_STRUCT_CONSTRUCTORS is set).
    VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
                                                              zx_handle_t                      handle_     = {},
                                                              const void *                     pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , handleType{ handleType_ }
      , handle{ handle_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct; the reinterpret_cast relies on the identical member layout.
    ImportMemoryZirconHandleInfoFUCHSIA( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportMemoryZirconHandleInfoFUCHSIA( *reinterpret_cast<ImportMemoryZirconHandleInfoFUCHSIA const *>( &rhs ) )
    {
    }

    ImportMemoryZirconHandleInfoFUCHSIA & operator=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (layout-compatible reinterpret_cast).
    ImportMemoryZirconHandleInfoFUCHSIA & operator=( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImportMemoryZirconHandleInfoFUCHSIA const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters returning *this for chaining (omitted when VULKAN_HPP_NO_STRUCT_SETTERS is set).
    VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setHandle( zx_handle_t handle_ ) VULKAN_HPP_NOEXCEPT
    {
      handle = handle_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer, in const and non-const flavors.
    operator VkImportMemoryZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportMemoryZirconHandleInfoFUCHSIA *>( this );
    }

    operator VkImportMemoryZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportMemoryZirconHandleInfoFUCHSIA *>( this );
    }

    operator VkImportMemoryZirconHandleInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImportMemoryZirconHandleInfoFUCHSIA *>( this );
    }

    operator VkImportMemoryZirconHandleInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImportMemoryZirconHandleInfoFUCHSIA *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: all members as a tuple of const references.
    std::tuple<StructureType const &, const void * const &, ExternalMemoryHandleTypeFlagBits const &, zx_handle_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, handleType, handle );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // zx_handle_t is a plain integral handle type, so the defaulted member-wise comparisons suffice
    // (a defaulted operator<=> also implicitly provides a defaulted operator==). This replaces a
    // hand-rolled operator<=> that ordered `handle` via memcmp on its object representation
    // (endianness-dependent) and matches the pattern used by every sibling struct in this file.
    auto operator<=>( ImportMemoryZirconHandleInfoFUCHSIA const & ) const = default;
#  else
    bool operator==( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle );
#    endif
    }

    bool operator!=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror VkImportMemoryZirconHandleInfoFUCHSIA in order and type.
    StructureType                    sType      = StructureType::eImportMemoryZirconHandleInfoFUCHSIA;
    const void *                     pNext      = {};
    ExternalMemoryHandleTypeFlagBits handleType = ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
    zx_handle_t                      handle     = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkImportMemoryZirconHandleInfoFUCHSIA>
  {
    using Type = ImportMemoryZirconHandleInfoFUCHSIA;
  };
#  endif

  // Maps the StructureType enum value to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eImportMemoryZirconHandleInfoFUCHSIA>
  {
    using Type = ImportMemoryZirconHandleInfoFUCHSIA;
  };
#endif /*VK_USE_PLATFORM_FUCHSIA*/

#if defined( VK_USE_PLATFORM_METAL_EXT )
  // wrapper struct for struct VkImportMetalBufferInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMetalBufferInfoEXT.html
  struct ImportMetalBufferInfoEXT
  {
    using NativeType = VkImportMetalBufferInfoEXT;

    // Chain metadata: the StructureType tag for this struct, which may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImportMetalBufferInfoEXT;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Constructors and copy assignment (omitted entirely when VULKAN_HPP_NO_STRUCT_CONSTRUCTORS is set).
    VULKAN_HPP_CONSTEXPR ImportMetalBufferInfoEXT( MTLBuffer_id mtlBuffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , mtlBuffer{ mtlBuffer_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImportMetalBufferInfoEXT( ImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct; the reinterpret_cast relies on the identical member layout.
    ImportMetalBufferInfoEXT( VkImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportMetalBufferInfoEXT( *reinterpret_cast<ImportMetalBufferInfoEXT const *>( &rhs ) )
    {
    }

    ImportMetalBufferInfoEXT & operator=( ImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (layout-compatible reinterpret_cast).
    ImportMetalBufferInfoEXT & operator=( VkImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImportMetalBufferInfoEXT const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters returning *this for chaining (omitted when VULKAN_HPP_NO_STRUCT_SETTERS is set).
    VULKAN_HPP_CONSTEXPR_14 ImportMetalBufferInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportMetalBufferInfoEXT & setMtlBuffer( MTLBuffer_id mtlBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      mtlBuffer = mtlBuffer_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer, in const and non-const flavors.
    operator VkImportMetalBufferInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportMetalBufferInfoEXT *>( this );
    }

    operator VkImportMetalBufferInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportMetalBufferInfoEXT *>( this );
    }

    operator VkImportMetalBufferInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImportMetalBufferInfoEXT *>( this );
    }

    operator VkImportMetalBufferInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImportMetalBufferInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: all members as a tuple of const references.
    std::tuple<StructureType const &, const void * const &, MTLBuffer_id const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, mtlBuffer );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted member-wise comparisons (also provides operator==).
    auto operator<=>( ImportMetalBufferInfoEXT const & ) const = default;
#  else
    bool operator==( ImportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mtlBuffer == rhs.mtlBuffer );
#    endif
    }

    bool operator!=( ImportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror VkImportMetalBufferInfoEXT in order and type.
    StructureType sType     = StructureType::eImportMetalBufferInfoEXT;
    const void *  pNext     = {};
    MTLBuffer_id  mtlBuffer = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkImportMetalBufferInfoEXT>
  {
    using Type = ImportMetalBufferInfoEXT;
  };
#  endif

  // Maps the StructureType enum value to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eImportMetalBufferInfoEXT>
  {
    using Type = ImportMetalBufferInfoEXT;
  };
#endif /*VK_USE_PLATFORM_METAL_EXT*/

#if defined( VK_USE_PLATFORM_METAL_EXT )
  // wrapper struct for struct VkImportMetalIOSurfaceInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMetalIOSurfaceInfoEXT.html
  struct ImportMetalIOSurfaceInfoEXT
  {
    using NativeType = VkImportMetalIOSurfaceInfoEXT;

    // Chain metadata: the StructureType tag for this struct, which may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImportMetalIoSurfaceInfoEXT;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Constructors and copy assignment (omitted entirely when VULKAN_HPP_NO_STRUCT_CONSTRUCTORS is set).
    VULKAN_HPP_CONSTEXPR ImportMetalIOSurfaceInfoEXT( IOSurfaceRef ioSurface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , ioSurface{ ioSurface_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImportMetalIOSurfaceInfoEXT( ImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct; the reinterpret_cast relies on the identical member layout.
    ImportMetalIOSurfaceInfoEXT( VkImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportMetalIOSurfaceInfoEXT( *reinterpret_cast<ImportMetalIOSurfaceInfoEXT const *>( &rhs ) )
    {
    }

    ImportMetalIOSurfaceInfoEXT & operator=( ImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (layout-compatible reinterpret_cast).
    ImportMetalIOSurfaceInfoEXT & operator=( VkImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImportMetalIOSurfaceInfoEXT const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters returning *this for chaining (omitted when VULKAN_HPP_NO_STRUCT_SETTERS is set).
    VULKAN_HPP_CONSTEXPR_14 ImportMetalIOSurfaceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportMetalIOSurfaceInfoEXT & setIoSurface( IOSurfaceRef ioSurface_ ) VULKAN_HPP_NOEXCEPT
    {
      ioSurface = ioSurface_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer, in const and non-const flavors.
    operator VkImportMetalIOSurfaceInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportMetalIOSurfaceInfoEXT *>( this );
    }

    operator VkImportMetalIOSurfaceInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportMetalIOSurfaceInfoEXT *>( this );
    }

    operator VkImportMetalIOSurfaceInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImportMetalIOSurfaceInfoEXT *>( this );
    }

    operator VkImportMetalIOSurfaceInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImportMetalIOSurfaceInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: all members as a tuple of const references.
    std::tuple<StructureType const &, const void * const &, IOSurfaceRef const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, ioSurface );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted member-wise comparisons (also provides operator==).
    auto operator<=>( ImportMetalIOSurfaceInfoEXT const & ) const = default;
#  else
    bool operator==( ImportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ioSurface == rhs.ioSurface );
#    endif
    }

    bool operator!=( ImportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror VkImportMetalIOSurfaceInfoEXT in order and type.
    StructureType sType     = StructureType::eImportMetalIoSurfaceInfoEXT;
    const void *  pNext     = {};
    IOSurfaceRef  ioSurface = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkImportMetalIOSurfaceInfoEXT>
  {
    using Type = ImportMetalIOSurfaceInfoEXT;
  };
#  endif

  // Maps the StructureType enum value to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eImportMetalIoSurfaceInfoEXT>
  {
    using Type = ImportMetalIOSurfaceInfoEXT;
  };
#endif /*VK_USE_PLATFORM_METAL_EXT*/

#if defined( VK_USE_PLATFORM_METAL_EXT )
  // wrapper struct for struct VkImportMetalSharedEventInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMetalSharedEventInfoEXT.html
  struct ImportMetalSharedEventInfoEXT
  {
    using NativeType = VkImportMetalSharedEventInfoEXT;

    // Chain metadata: the StructureType tag for this struct, which may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImportMetalSharedEventInfoEXT;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Constructors and copy assignment (omitted entirely when VULKAN_HPP_NO_STRUCT_CONSTRUCTORS is set).
    VULKAN_HPP_CONSTEXPR ImportMetalSharedEventInfoEXT( MTLSharedEvent_id mtlSharedEvent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , mtlSharedEvent{ mtlSharedEvent_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImportMetalSharedEventInfoEXT( ImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct; the reinterpret_cast relies on the identical member layout.
    ImportMetalSharedEventInfoEXT( VkImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportMetalSharedEventInfoEXT( *reinterpret_cast<ImportMetalSharedEventInfoEXT const *>( &rhs ) )
    {
    }

    ImportMetalSharedEventInfoEXT & operator=( ImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (layout-compatible reinterpret_cast).
    ImportMetalSharedEventInfoEXT & operator=( VkImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImportMetalSharedEventInfoEXT const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters returning *this for chaining (omitted when VULKAN_HPP_NO_STRUCT_SETTERS is set).
    VULKAN_HPP_CONSTEXPR_14 ImportMetalSharedEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportMetalSharedEventInfoEXT & setMtlSharedEvent( MTLSharedEvent_id mtlSharedEvent_ ) VULKAN_HPP_NOEXCEPT
    {
      mtlSharedEvent = mtlSharedEvent_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer, in const and non-const flavors.
    operator VkImportMetalSharedEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportMetalSharedEventInfoEXT *>( this );
    }

    operator VkImportMetalSharedEventInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportMetalSharedEventInfoEXT *>( this );
    }

    operator VkImportMetalSharedEventInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImportMetalSharedEventInfoEXT *>( this );
    }

    operator VkImportMetalSharedEventInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImportMetalSharedEventInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: all members as a tuple of const references.
    std::tuple<StructureType const &, const void * const &, MTLSharedEvent_id const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, mtlSharedEvent );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted member-wise comparisons (also provides operator==).
    auto operator<=>( ImportMetalSharedEventInfoEXT const & ) const = default;
#  else
    bool operator==( ImportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mtlSharedEvent == rhs.mtlSharedEvent );
#    endif
    }

    bool operator!=( ImportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror VkImportMetalSharedEventInfoEXT in order and type.
    StructureType     sType          = StructureType::eImportMetalSharedEventInfoEXT;
    const void *      pNext          = {};
    MTLSharedEvent_id mtlSharedEvent = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkImportMetalSharedEventInfoEXT>
  {
    using Type = ImportMetalSharedEventInfoEXT;
  };
#  endif

  // Maps the StructureType enum value to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eImportMetalSharedEventInfoEXT>
  {
    using Type = ImportMetalSharedEventInfoEXT;
  };
#endif /*VK_USE_PLATFORM_METAL_EXT*/

#if defined( VK_USE_PLATFORM_METAL_EXT )
  // wrapper struct for struct VkImportMetalTextureInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMetalTextureInfoEXT.html
  struct ImportMetalTextureInfoEXT
  {
    using NativeType = VkImportMetalTextureInfoEXT;

    // This struct may appear more than once in a structure chain (e.g. one instance per image plane).
    static const bool                                  allowDuplicate = true;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImportMetalTextureInfoEXT;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the in-class member initializer and never passed in.
    VULKAN_HPP_CONSTEXPR ImportMetalTextureInfoEXT( ImageAspectFlagBits plane_      = ImageAspectFlagBits::eColor,
                                                    MTLTexture_id       mtlTexture_ = {},
                                                    const void *        pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , plane{ plane_ }
      , mtlTexture{ mtlTexture_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImportMetalTextureInfoEXT( ImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; wrapper and C struct are layout-compatible, so a reinterpret_cast suffices.
    ImportMetalTextureInfoEXT( VkImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportMetalTextureInfoEXT( *reinterpret_cast<ImportMetalTextureInfoEXT const *>( &rhs ) )
    {
    }

    ImportMetalTextureInfoEXT & operator=( ImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper.
    ImportMetalTextureInfoEXT & operator=( VkImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImportMetalTextureInfoEXT const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setPlane( ImageAspectFlagBits plane_ ) VULKAN_HPP_NOEXCEPT
    {
      plane = plane_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setMtlTexture( MTLTexture_id mtlTexture_ ) VULKAN_HPP_NOEXCEPT
    {
      mtlTexture = mtlTexture_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (by reference and by pointer), for direct use in C API calls.
    operator VkImportMetalTextureInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportMetalTextureInfoEXT *>( this );
    }

    operator VkImportMetalTextureInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportMetalTextureInfoEXT *>( this );
    }

    operator VkImportMetalTextureInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImportMetalTextureInfoEXT *>( this );
    }

    operator VkImportMetalTextureInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImportMetalTextureInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, ImageAspectFlagBits const &, MTLTexture_id const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, plane, mtlTexture );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImportMetalTextureInfoEXT const & ) const = default;
#  else
    // Member-wise equality fallback when the spaceship operator is unavailable.
    bool operator==( ImportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( plane == rhs.plane ) && ( mtlTexture == rhs.mtlTexture );
#    endif
    }

    bool operator!=( ImportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror VkImportMetalTextureInfoEXT; sType must stay eImportMetalTextureInfoEXT for the C API.
    StructureType       sType      = StructureType::eImportMetalTextureInfoEXT;
    const void *        pNext      = {};
    ImageAspectFlagBits plane      = ImageAspectFlagBits::eColor;
    MTLTexture_id       mtlTexture = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct VkImportMetalTextureInfoEXT to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkImportMetalTextureInfoEXT>
  {
    using Type = ImportMetalTextureInfoEXT;
  };
#  endif

  // Maps StructureType::eImportMetalTextureInfoEXT to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::eImportMetalTextureInfoEXT>
  {
    using Type = ImportMetalTextureInfoEXT;
  };
#endif /*VK_USE_PLATFORM_METAL_EXT*/

#if defined( VK_USE_PLATFORM_OHOS )
  // wrapper struct for struct VkImportNativeBufferInfoOHOS, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportNativeBufferInfoOHOS.html
  struct ImportNativeBufferInfoOHOS
  {
    using NativeType = VkImportNativeBufferInfoOHOS;

    // At most one instance of this struct may appear in a structure chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImportNativeBufferInfoOHOS;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the in-class member initializer and never passed in.
    VULKAN_HPP_CONSTEXPR ImportNativeBufferInfoOHOS( struct OH_NativeBuffer * buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , buffer{ buffer_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImportNativeBufferInfoOHOS( ImportNativeBufferInfoOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; wrapper and C struct are layout-compatible, so a reinterpret_cast suffices.
    ImportNativeBufferInfoOHOS( VkImportNativeBufferInfoOHOS const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportNativeBufferInfoOHOS( *reinterpret_cast<ImportNativeBufferInfoOHOS const *>( &rhs ) )
    {
    }

    ImportNativeBufferInfoOHOS & operator=( ImportNativeBufferInfoOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper.
    ImportNativeBufferInfoOHOS & operator=( VkImportNativeBufferInfoOHOS const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImportNativeBufferInfoOHOS const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 ImportNativeBufferInfoOHOS & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportNativeBufferInfoOHOS & setBuffer( struct OH_NativeBuffer * buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (by reference and by pointer), for direct use in C API calls.
    operator VkImportNativeBufferInfoOHOS const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportNativeBufferInfoOHOS *>( this );
    }

    operator VkImportNativeBufferInfoOHOS &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportNativeBufferInfoOHOS *>( this );
    }

    operator VkImportNativeBufferInfoOHOS const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImportNativeBufferInfoOHOS *>( this );
    }

    operator VkImportNativeBufferInfoOHOS *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImportNativeBufferInfoOHOS *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, struct OH_NativeBuffer * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, buffer );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImportNativeBufferInfoOHOS const & ) const = default;
#  else
    // Member-wise equality fallback when the spaceship operator is unavailable.
    bool operator==( ImportNativeBufferInfoOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer );
#    endif
    }

    bool operator!=( ImportNativeBufferInfoOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror VkImportNativeBufferInfoOHOS; sType must stay eImportNativeBufferInfoOHOS for the C API.
    StructureType            sType  = StructureType::eImportNativeBufferInfoOHOS;
    const void *             pNext  = {};
    struct OH_NativeBuffer * buffer = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct VkImportNativeBufferInfoOHOS to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkImportNativeBufferInfoOHOS>
  {
    using Type = ImportNativeBufferInfoOHOS;
  };
#  endif

  // Maps StructureType::eImportNativeBufferInfoOHOS to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::eImportNativeBufferInfoOHOS>
  {
    using Type = ImportNativeBufferInfoOHOS;
  };
#endif /*VK_USE_PLATFORM_OHOS*/

#if defined( VK_USE_PLATFORM_SCREEN_QNX )
  // wrapper struct for struct VkImportScreenBufferInfoQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportScreenBufferInfoQNX.html
  struct ImportScreenBufferInfoQNX
  {
    using NativeType = VkImportScreenBufferInfoQNX;

    // At most one instance of this struct may appear in a structure chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImportScreenBufferInfoQNX;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the in-class member initializer and never passed in.
    VULKAN_HPP_CONSTEXPR ImportScreenBufferInfoQNX( struct _screen_buffer * buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , buffer{ buffer_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImportScreenBufferInfoQNX( ImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; wrapper and C struct are layout-compatible, so a reinterpret_cast suffices.
    ImportScreenBufferInfoQNX( VkImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportScreenBufferInfoQNX( *reinterpret_cast<ImportScreenBufferInfoQNX const *>( &rhs ) )
    {
    }

    ImportScreenBufferInfoQNX & operator=( ImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper.
    ImportScreenBufferInfoQNX & operator=( VkImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImportScreenBufferInfoQNX const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 ImportScreenBufferInfoQNX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportScreenBufferInfoQNX & setBuffer( struct _screen_buffer * buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (by reference and by pointer), for direct use in C API calls.
    operator VkImportScreenBufferInfoQNX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportScreenBufferInfoQNX *>( this );
    }

    operator VkImportScreenBufferInfoQNX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportScreenBufferInfoQNX *>( this );
    }

    operator VkImportScreenBufferInfoQNX const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImportScreenBufferInfoQNX *>( this );
    }

    operator VkImportScreenBufferInfoQNX *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImportScreenBufferInfoQNX *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, struct _screen_buffer * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, buffer );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImportScreenBufferInfoQNX const & ) const = default;
#  else
    // Member-wise equality fallback when the spaceship operator is unavailable.
    bool operator==( ImportScreenBufferInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer );
#    endif
    }

    bool operator!=( ImportScreenBufferInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror VkImportScreenBufferInfoQNX; sType must stay eImportScreenBufferInfoQNX for the C API.
    StructureType           sType  = StructureType::eImportScreenBufferInfoQNX;
    const void *            pNext  = {};
    struct _screen_buffer * buffer = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct VkImportScreenBufferInfoQNX to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkImportScreenBufferInfoQNX>
  {
    using Type = ImportScreenBufferInfoQNX;
  };
#  endif

  // Maps StructureType::eImportScreenBufferInfoQNX to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::eImportScreenBufferInfoQNX>
  {
    using Type = ImportScreenBufferInfoQNX;
  };
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/

  // wrapper struct for struct VkImportSemaphoreFdInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportSemaphoreFdInfoKHR.html
  struct ImportSemaphoreFdInfoKHR
  {
    using NativeType = VkImportSemaphoreFdInfoKHR;

    // At most one instance of this struct may appear in a structure chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImportSemaphoreFdInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the in-class member initializer and never passed in.
    VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( Semaphore                           semaphore_  = {},
                                                   SemaphoreImportFlags                flags_      = {},
                                                   ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
                                                   int                                 fd_         = {},
                                                   const void *                        pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , semaphore{ semaphore_ }
      , flags{ flags_ }
      , handleType{ handleType_ }
      , fd{ fd_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; wrapper and C struct are layout-compatible, so a reinterpret_cast suffices.
    ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportSemaphoreFdInfoKHR( *reinterpret_cast<ImportSemaphoreFdInfoKHR const *>( &rhs ) )
    {
    }

    ImportSemaphoreFdInfoKHR & operator=( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper.
    ImportSemaphoreFdInfoKHR & operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImportSemaphoreFdInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setSemaphore( Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphore = semaphore_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setFlags( SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
    {
      fd = fd_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (by reference and by pointer), for direct use in C API calls.
    operator VkImportSemaphoreFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( this );
    }

    operator VkImportSemaphoreFdInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportSemaphoreFdInfoKHR *>( this );
    }

    operator VkImportSemaphoreFdInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( this );
    }

    operator VkImportSemaphoreFdInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImportSemaphoreFdInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, used for reflection-based comparison.
    std::tuple<StructureType const &,
               const void * const &,
               Semaphore const &,
               SemaphoreImportFlags const &,
               ExternalSemaphoreHandleTypeFlagBits const &,
               int const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, semaphore, flags, handleType, fd );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImportSemaphoreFdInfoKHR const & ) const = default;
#else
    // Member-wise equality fallback when the spaceship operator is unavailable.
    bool operator==( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) &&
             ( fd == rhs.fd );
#  endif
    }

    bool operator!=( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkImportSemaphoreFdInfoKHR; sType must stay eImportSemaphoreFdInfoKHR for the C API.
    StructureType                       sType      = StructureType::eImportSemaphoreFdInfoKHR;
    const void *                        pNext      = {};
    Semaphore                           semaphore  = {};
    SemaphoreImportFlags                flags      = {};
    ExternalSemaphoreHandleTypeFlagBits handleType = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
    int                                 fd         = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct VkImportSemaphoreFdInfoKHR to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkImportSemaphoreFdInfoKHR>
  {
    using Type = ImportSemaphoreFdInfoKHR;
  };
#endif

  // Maps StructureType::eImportSemaphoreFdInfoKHR to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::eImportSemaphoreFdInfoKHR>
  {
    using Type = ImportSemaphoreFdInfoKHR;
  };

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  // wrapper struct for struct VkImportSemaphoreWin32HandleInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportSemaphoreWin32HandleInfoKHR.html
  struct ImportSemaphoreWin32HandleInfoKHR
  {
    using NativeType = VkImportSemaphoreWin32HandleInfoKHR;

    // At most one instance of this struct may appear in a structure chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImportSemaphoreWin32HandleInfoKHR;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the in-class member initializer and never passed in.
    VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( Semaphore                           semaphore_  = {},
                                                            SemaphoreImportFlags                flags_      = {},
                                                            ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
                                                            HANDLE                              handle_     = {},
                                                            LPCWSTR                             name_       = {},
                                                            const void *                        pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , semaphore{ semaphore_ }
      , flags{ flags_ }
      , handleType{ handleType_ }
      , handle{ handle_ }
      , name{ name_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; wrapper and C struct are layout-compatible, so a reinterpret_cast suffices.
    ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportSemaphoreWin32HandleInfoKHR( *reinterpret_cast<ImportSemaphoreWin32HandleInfoKHR const *>( &rhs ) )
    {
    }

    ImportSemaphoreWin32HandleInfoKHR & operator=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper.
    ImportSemaphoreWin32HandleInfoKHR & operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImportSemaphoreWin32HandleInfoKHR const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setSemaphore( Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphore = semaphore_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setFlags( SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
    {
      handle = handle_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
    {
      name = name_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (by reference and by pointer), for direct use in C API calls.
    operator VkImportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( this );
    }

    operator VkImportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportSemaphoreWin32HandleInfoKHR *>( this );
    }

    operator VkImportSemaphoreWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( this );
    }

    operator VkImportSemaphoreWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImportSemaphoreWin32HandleInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, used for reflection-based comparison.
    std::tuple<StructureType const &,
               const void * const &,
               Semaphore const &,
               SemaphoreImportFlags const &,
               ExternalSemaphoreHandleTypeFlagBits const &,
               HANDLE const &,
               LPCWSTR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, semaphore, flags, handleType, handle, name );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImportSemaphoreWin32HandleInfoKHR const & ) const = default;
#  else
    // Member-wise equality fallback when the spaceship operator is unavailable.
    // NOTE(review): `name` (LPCWSTR) is compared by pointer, not by string contents — matching the defaulted <=> above.
    bool operator==( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) &&
             ( handle == rhs.handle ) && ( name == rhs.name );
#    endif
    }

    bool operator!=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror VkImportSemaphoreWin32HandleInfoKHR; sType must stay eImportSemaphoreWin32HandleInfoKHR for the C API.
    StructureType                       sType      = StructureType::eImportSemaphoreWin32HandleInfoKHR;
    const void *                        pNext      = {};
    Semaphore                           semaphore  = {};
    SemaphoreImportFlags                flags      = {};
    ExternalSemaphoreHandleTypeFlagBits handleType = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
    HANDLE                              handle     = {};
    LPCWSTR                             name       = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct VkImportSemaphoreWin32HandleInfoKHR to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkImportSemaphoreWin32HandleInfoKHR>
  {
    using Type = ImportSemaphoreWin32HandleInfoKHR;
  };
#  endif

  // Maps StructureType::eImportSemaphoreWin32HandleInfoKHR to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::eImportSemaphoreWin32HandleInfoKHR>
  {
    using Type = ImportSemaphoreWin32HandleInfoKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

#if defined( VK_USE_PLATFORM_FUCHSIA )
  // wrapper struct for struct VkImportSemaphoreZirconHandleInfoFUCHSIA, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportSemaphoreZirconHandleInfoFUCHSIA.html
  struct ImportSemaphoreZirconHandleInfoFUCHSIA
  {
    using NativeType = VkImportSemaphoreZirconHandleInfoFUCHSIA;

    // At most one instance of this struct may appear in a structure chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the in-class member initializer and never passed in.
    VULKAN_HPP_CONSTEXPR
      ImportSemaphoreZirconHandleInfoFUCHSIA( Semaphore                           semaphore_    = {},
                                              SemaphoreImportFlags                flags_        = {},
                                              ExternalSemaphoreHandleTypeFlagBits handleType_   = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
                                              zx_handle_t                         zirconHandle_ = {},
                                              const void *                        pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , semaphore{ semaphore_ }
      , flags{ flags_ }
      , handleType{ handleType_ }
      , zirconHandle{ zirconHandle_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; wrapper and C struct are layout-compatible, so a reinterpret_cast suffices.
    ImportSemaphoreZirconHandleInfoFUCHSIA( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportSemaphoreZirconHandleInfoFUCHSIA( *reinterpret_cast<ImportSemaphoreZirconHandleInfoFUCHSIA const *>( &rhs ) )
    {
    }

    ImportSemaphoreZirconHandleInfoFUCHSIA & operator=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper.
    ImportSemaphoreZirconHandleInfoFUCHSIA & operator=( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ImportSemaphoreZirconHandleInfoFUCHSIA const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setSemaphore( Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphore = semaphore_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setFlags( SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setZirconHandle( zx_handle_t zirconHandle_ ) VULKAN_HPP_NOEXCEPT
    {
      zirconHandle = zirconHandle_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (by reference and by pointer), for direct use in C API calls.
    operator VkImportSemaphoreZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( this );
    }

    operator VkImportSemaphoreZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportSemaphoreZirconHandleInfoFUCHSIA *>( this );
    }

    operator VkImportSemaphoreZirconHandleInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( this );
    }

    operator VkImportSemaphoreZirconHandleInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkImportSemaphoreZirconHandleInfoFUCHSIA *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, used for reflection-based comparison.
    std::tuple<StructureType const &,
               const void * const &,
               Semaphore const &,
               SemaphoreImportFlags const &,
               ExternalSemaphoreHandleTypeFlagBits const &,
               zx_handle_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, semaphore, flags, handleType, zirconHandle );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written three-way comparison (not defaulted): zirconHandle is compared
    // bytewise via memcmp, all other members via their own operators.
    std::strong_ordering operator<=>( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = semaphore <=> rhs.semaphore; cmp != 0 )
        return cmp;
      if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
        return cmp;
      if ( auto cmp = handleType <=> rhs.handleType; cmp != 0 )
        return cmp;
      if ( auto cmp = memcmp( &zirconHandle, &rhs.zirconHandle, sizeof( zx_handle_t ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#  endif

    // Equality is provided unconditionally here (a user-declared <=> does not imply operator==);
    // zirconHandle is compared bytewise, consistent with operator<=> above.
    bool operator==( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) &&
             ( memcmp( &zirconHandle, &rhs.zirconHandle, sizeof( zx_handle_t ) ) == 0 );
    }

    bool operator!=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Members mirror VkImportSemaphoreZirconHandleInfoFUCHSIA; sType must stay eImportSemaphoreZirconHandleInfoFUCHSIA for the C API.
    StructureType                       sType        = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA;
    const void *                        pNext        = {};
    Semaphore                           semaphore    = {};
    SemaphoreImportFlags                flags        = {};
    ExternalSemaphoreHandleTypeFlagBits handleType   = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
    zx_handle_t                         zirconHandle = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct VkImportSemaphoreZirconHandleInfoFUCHSIA to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkImportSemaphoreZirconHandleInfoFUCHSIA>
  {
    using Type = ImportSemaphoreZirconHandleInfoFUCHSIA;
  };
#  endif

  // Maps StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA>
  {
    using Type = ImportSemaphoreZirconHandleInfoFUCHSIA;
  };
#endif /*VK_USE_PLATFORM_FUCHSIA*/

  // wrapper struct for struct VkIndirectCommandsExecutionSetTokenEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsExecutionSetTokenEXT.html
  // Zero-overhead C++ wrapper; the conversions below rely on this type being layout-identical to the native C struct.
  struct IndirectCommandsExecutionSetTokenEXT
  {
    // The corresponding native C type, for generic wrapper <-> C mapping.
    using NativeType = VkIndirectCommandsExecutionSetTokenEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all parameters are defaulted, so it doubles as the default constructor.
    VULKAN_HPP_CONSTEXPR IndirectCommandsExecutionSetTokenEXT( IndirectExecutionSetInfoTypeEXT type_         = IndirectExecutionSetInfoTypeEXT::ePipelines,
                                                               ShaderStageFlags                shaderStages_ = {} ) VULKAN_HPP_NOEXCEPT
      : type{ type_ }
      , shaderStages{ shaderStages_ }
    {
    }

    VULKAN_HPP_CONSTEXPR IndirectCommandsExecutionSetTokenEXT( IndirectCommandsExecutionSetTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because wrapper and C struct share one memory layout.
    IndirectCommandsExecutionSetTokenEXT( VkIndirectCommandsExecutionSetTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : IndirectCommandsExecutionSetTokenEXT( *reinterpret_cast<IndirectCommandsExecutionSetTokenEXT const *>( &rhs ) )
    {
    }

    IndirectCommandsExecutionSetTokenEXT & operator=( IndirectCommandsExecutionSetTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-identity argument as the converting constructor).
    IndirectCommandsExecutionSetTokenEXT & operator=( VkIndirectCommandsExecutionSetTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<IndirectCommandsExecutionSetTokenEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsExecutionSetTokenEXT & setType( IndirectExecutionSetInfoTypeEXT type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsExecutionSetTokenEXT & setShaderStages( ShaderStageFlags shaderStages_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStages = shaderStages_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (by reference and by pointer), for passing directly to the C API.
    operator VkIndirectCommandsExecutionSetTokenEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkIndirectCommandsExecutionSetTokenEXT *>( this );
    }

    operator VkIndirectCommandsExecutionSetTokenEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkIndirectCommandsExecutionSetTokenEXT *>( this );
    }

    operator VkIndirectCommandsExecutionSetTokenEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkIndirectCommandsExecutionSetTokenEXT *>( this );
    }

    operator VkIndirectCommandsExecutionSetTokenEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkIndirectCommandsExecutionSetTokenEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used below for member-wise comparison.
    std::tuple<IndirectExecutionSetInfoTypeEXT const &, ShaderStageFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( type, shaderStages );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( IndirectCommandsExecutionSetTokenEXT const & ) const = default;
#else
    // Hand-written member-wise equality when defaulted operator<=> is unavailable.
    bool operator==( IndirectCommandsExecutionSetTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( type == rhs.type ) && ( shaderStages == rhs.shaderStages );
#  endif
    }

    bool operator!=( IndirectCommandsExecutionSetTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    IndirectExecutionSetInfoTypeEXT type         = IndirectExecutionSetInfoTypeEXT::ePipelines;
    ShaderStageFlags                shaderStages = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct back to this C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkIndirectCommandsExecutionSetTokenEXT>
  {
    using Type = IndirectCommandsExecutionSetTokenEXT;
  };
#endif

  // wrapper struct for struct VkIndirectCommandsIndexBufferTokenEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsIndexBufferTokenEXT.html
  // Zero-overhead C++ wrapper; the conversions below rely on this type being layout-identical to the native C struct.
  struct IndirectCommandsIndexBufferTokenEXT
  {
    // The corresponding native C type, for generic wrapper <-> C mapping.
    using NativeType = VkIndirectCommandsIndexBufferTokenEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; the defaulted parameter also makes this the default constructor.
    VULKAN_HPP_CONSTEXPR IndirectCommandsIndexBufferTokenEXT(
      IndirectCommandsInputModeFlagBitsEXT mode_ = IndirectCommandsInputModeFlagBitsEXT::eVulkanIndexBuffer ) VULKAN_HPP_NOEXCEPT : mode{ mode_ }
    {
    }

    VULKAN_HPP_CONSTEXPR IndirectCommandsIndexBufferTokenEXT( IndirectCommandsIndexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because wrapper and C struct share one memory layout.
    IndirectCommandsIndexBufferTokenEXT( VkIndirectCommandsIndexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : IndirectCommandsIndexBufferTokenEXT( *reinterpret_cast<IndirectCommandsIndexBufferTokenEXT const *>( &rhs ) )
    {
    }

    IndirectCommandsIndexBufferTokenEXT & operator=( IndirectCommandsIndexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-identity argument as the converting constructor).
    IndirectCommandsIndexBufferTokenEXT & operator=( VkIndirectCommandsIndexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<IndirectCommandsIndexBufferTokenEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setter: returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsIndexBufferTokenEXT & setMode( IndirectCommandsInputModeFlagBitsEXT mode_ ) VULKAN_HPP_NOEXCEPT
    {
      mode = mode_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (by reference and by pointer), for passing directly to the C API.
    operator VkIndirectCommandsIndexBufferTokenEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkIndirectCommandsIndexBufferTokenEXT *>( this );
    }

    operator VkIndirectCommandsIndexBufferTokenEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkIndirectCommandsIndexBufferTokenEXT *>( this );
    }

    operator VkIndirectCommandsIndexBufferTokenEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkIndirectCommandsIndexBufferTokenEXT *>( this );
    }

    operator VkIndirectCommandsIndexBufferTokenEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkIndirectCommandsIndexBufferTokenEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used below for member-wise comparison.
    std::tuple<IndirectCommandsInputModeFlagBitsEXT const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( mode );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( IndirectCommandsIndexBufferTokenEXT const & ) const = default;
#else
    // Hand-written member-wise equality when defaulted operator<=> is unavailable.
    bool operator==( IndirectCommandsIndexBufferTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( mode == rhs.mode );
#  endif
    }

    bool operator!=( IndirectCommandsIndexBufferTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    IndirectCommandsInputModeFlagBitsEXT mode = IndirectCommandsInputModeFlagBitsEXT::eVulkanIndexBuffer;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct back to this C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkIndirectCommandsIndexBufferTokenEXT>
  {
    using Type = IndirectCommandsIndexBufferTokenEXT;
  };
#endif

  // wrapper struct for struct VkPushConstantRange, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPushConstantRange.html
  // Zero-overhead C++ wrapper; the conversions below rely on this type being layout-identical to the native C struct.
  struct PushConstantRange
  {
    // The corresponding native C type, for generic wrapper <-> C mapping.
    using NativeType = VkPushConstantRange;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all parameters are defaulted, so it doubles as the default constructor.
    VULKAN_HPP_CONSTEXPR PushConstantRange( ShaderStageFlags stageFlags_ = {}, uint32_t offset_ = {}, uint32_t size_ = {} ) VULKAN_HPP_NOEXCEPT
      : stageFlags{ stageFlags_ }
      , offset{ offset_ }
      , size{ size_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PushConstantRange( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because wrapper and C struct share one memory layout.
    PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT : PushConstantRange( *reinterpret_cast<PushConstantRange const *>( &rhs ) ) {}

    PushConstantRange & operator=( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-identity argument as the converting constructor).
    PushConstantRange & operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PushConstantRange const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setStageFlags( ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      stageFlags = stageFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (by reference and by pointer), for passing directly to the C API.
    operator VkPushConstantRange const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPushConstantRange *>( this );
    }

    operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPushConstantRange *>( this );
    }

    operator VkPushConstantRange const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPushConstantRange *>( this );
    }

    operator VkPushConstantRange *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPushConstantRange *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used below for member-wise comparison.
    std::tuple<ShaderStageFlags const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( stageFlags, offset, size );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PushConstantRange const & ) const = default;
#else
    // Hand-written member-wise equality when defaulted operator<=> is unavailable.
    bool operator==( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( stageFlags == rhs.stageFlags ) && ( offset == rhs.offset ) && ( size == rhs.size );
#  endif
    }

    bool operator!=( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    ShaderStageFlags stageFlags = {};
    uint32_t         offset     = {};
    uint32_t         size       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct back to this C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkPushConstantRange>
  {
    using Type = PushConstantRange;
  };
#endif

  // wrapper struct for struct VkIndirectCommandsPushConstantTokenEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsPushConstantTokenEXT.html
  // Zero-overhead C++ wrapper; the conversions below rely on this type being layout-identical to the native C struct.
  struct IndirectCommandsPushConstantTokenEXT
  {
    // The corresponding native C type, for generic wrapper <-> C mapping.
    using NativeType = VkIndirectCommandsPushConstantTokenEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; the defaulted parameter also makes this the default constructor.
    VULKAN_HPP_CONSTEXPR IndirectCommandsPushConstantTokenEXT( PushConstantRange updateRange_ = {} ) VULKAN_HPP_NOEXCEPT : updateRange{ updateRange_ } {}

    VULKAN_HPP_CONSTEXPR IndirectCommandsPushConstantTokenEXT( IndirectCommandsPushConstantTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because wrapper and C struct share one memory layout.
    IndirectCommandsPushConstantTokenEXT( VkIndirectCommandsPushConstantTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : IndirectCommandsPushConstantTokenEXT( *reinterpret_cast<IndirectCommandsPushConstantTokenEXT const *>( &rhs ) )
    {
    }

    IndirectCommandsPushConstantTokenEXT & operator=( IndirectCommandsPushConstantTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-identity argument as the converting constructor).
    IndirectCommandsPushConstantTokenEXT & operator=( VkIndirectCommandsPushConstantTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<IndirectCommandsPushConstantTokenEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setter: returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsPushConstantTokenEXT & setUpdateRange( PushConstantRange const & updateRange_ ) VULKAN_HPP_NOEXCEPT
    {
      updateRange = updateRange_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (by reference and by pointer), for passing directly to the C API.
    operator VkIndirectCommandsPushConstantTokenEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkIndirectCommandsPushConstantTokenEXT *>( this );
    }

    operator VkIndirectCommandsPushConstantTokenEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkIndirectCommandsPushConstantTokenEXT *>( this );
    }

    operator VkIndirectCommandsPushConstantTokenEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkIndirectCommandsPushConstantTokenEXT *>( this );
    }

    operator VkIndirectCommandsPushConstantTokenEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkIndirectCommandsPushConstantTokenEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used below for member-wise comparison.
    std::tuple<PushConstantRange const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( updateRange );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( IndirectCommandsPushConstantTokenEXT const & ) const = default;
#else
    // Hand-written member-wise equality when defaulted operator<=> is unavailable.
    bool operator==( IndirectCommandsPushConstantTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( updateRange == rhs.updateRange );
#  endif
    }

    bool operator!=( IndirectCommandsPushConstantTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    PushConstantRange updateRange = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct back to this C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkIndirectCommandsPushConstantTokenEXT>
  {
    using Type = IndirectCommandsPushConstantTokenEXT;
  };
#endif

  // wrapper struct for struct VkIndirectCommandsVertexBufferTokenEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsVertexBufferTokenEXT.html
  // Zero-overhead C++ wrapper; the conversions below rely on this type being layout-identical to the native C struct.
  struct IndirectCommandsVertexBufferTokenEXT
  {
    // The corresponding native C type, for generic wrapper <-> C mapping.
    using NativeType = VkIndirectCommandsVertexBufferTokenEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; the defaulted parameter also makes this the default constructor.
    VULKAN_HPP_CONSTEXPR IndirectCommandsVertexBufferTokenEXT( uint32_t vertexBindingUnit_ = {} ) VULKAN_HPP_NOEXCEPT : vertexBindingUnit{ vertexBindingUnit_ }
    {
    }

    VULKAN_HPP_CONSTEXPR IndirectCommandsVertexBufferTokenEXT( IndirectCommandsVertexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because wrapper and C struct share one memory layout.
    IndirectCommandsVertexBufferTokenEXT( VkIndirectCommandsVertexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : IndirectCommandsVertexBufferTokenEXT( *reinterpret_cast<IndirectCommandsVertexBufferTokenEXT const *>( &rhs ) )
    {
    }

    IndirectCommandsVertexBufferTokenEXT & operator=( IndirectCommandsVertexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-identity argument as the converting constructor).
    IndirectCommandsVertexBufferTokenEXT & operator=( VkIndirectCommandsVertexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<IndirectCommandsVertexBufferTokenEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setter: returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsVertexBufferTokenEXT & setVertexBindingUnit( uint32_t vertexBindingUnit_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexBindingUnit = vertexBindingUnit_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (by reference and by pointer), for passing directly to the C API.
    operator VkIndirectCommandsVertexBufferTokenEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkIndirectCommandsVertexBufferTokenEXT *>( this );
    }

    operator VkIndirectCommandsVertexBufferTokenEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkIndirectCommandsVertexBufferTokenEXT *>( this );
    }

    operator VkIndirectCommandsVertexBufferTokenEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkIndirectCommandsVertexBufferTokenEXT *>( this );
    }

    operator VkIndirectCommandsVertexBufferTokenEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkIndirectCommandsVertexBufferTokenEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used below for member-wise comparison.
    std::tuple<uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( vertexBindingUnit );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( IndirectCommandsVertexBufferTokenEXT const & ) const = default;
#else
    // Hand-written member-wise equality when defaulted operator<=> is unavailable.
    bool operator==( IndirectCommandsVertexBufferTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( vertexBindingUnit == rhs.vertexBindingUnit );
#  endif
    }

    bool operator!=( IndirectCommandsVertexBufferTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t vertexBindingUnit = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct back to this C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkIndirectCommandsVertexBufferTokenEXT>
  {
    using Type = IndirectCommandsVertexBufferTokenEXT;
  };
#endif

  // Wrapper union mirroring VkIndirectCommandsTokenDataEXT: holds exactly one of several token-data pointers.
  // NOTE: the union does not track which member is active; callers must know externally (typically via the
  // accompanying IndirectCommandsTokenTypeEXT in IndirectCommandsLayoutTokenEXT — confirm against the spec).
  union IndirectCommandsTokenDataEXT
  {
    // The corresponding native C type, for generic wrapper <-> C mapping.
    using NativeType = VkIndirectCommandsTokenDataEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )

    // One converting constructor per union member; the pPushConstant overload is defaulted and so
    // also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT( const IndirectCommandsPushConstantTokenEXT * pPushConstant_ = {} ) : pPushConstant( pPushConstant_ )
    {
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT( const IndirectCommandsVertexBufferTokenEXT * pVertexBuffer_ ) : pVertexBuffer( pVertexBuffer_ ) {}

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT( const IndirectCommandsIndexBufferTokenEXT * pIndexBuffer_ ) : pIndexBuffer( pIndexBuffer_ ) {}

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT( const IndirectCommandsExecutionSetTokenEXT * pExecutionSet_ ) : pExecutionSet( pExecutionSet_ ) {}
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
    // Fluent setters: each overwrites the union with the given pointer and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT & setPPushConstant( const IndirectCommandsPushConstantTokenEXT * pPushConstant_ ) VULKAN_HPP_NOEXCEPT
    {
      pPushConstant = pPushConstant_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT & setPVertexBuffer( const IndirectCommandsVertexBufferTokenEXT * pVertexBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      pVertexBuffer = pVertexBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT & setPIndexBuffer( const IndirectCommandsIndexBufferTokenEXT * pIndexBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      pIndexBuffer = pIndexBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT & setPExecutionSet( const IndirectCommandsExecutionSetTokenEXT * pExecutionSet_ ) VULKAN_HPP_NOEXCEPT
    {
      pExecutionSet = pExecutionSet_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C union; rely on wrapper and C union sharing one memory layout.
    operator VkIndirectCommandsTokenDataEXT const &() const
    {
      return *reinterpret_cast<const VkIndirectCommandsTokenDataEXT *>( this );
    }

    operator VkIndirectCommandsTokenDataEXT &()
    {
      return *reinterpret_cast<VkIndirectCommandsTokenDataEXT *>( this );
    }

#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
    // With unrestricted unions the members can use the C++ wrapper types directly ...
    const IndirectCommandsPushConstantTokenEXT * pPushConstant;
    const IndirectCommandsVertexBufferTokenEXT * pVertexBuffer;
    const IndirectCommandsIndexBufferTokenEXT *  pIndexBuffer;
    const IndirectCommandsExecutionSetTokenEXT * pExecutionSet;
#else
    // ... otherwise fall back to the layout-identical native C types.
    const VkIndirectCommandsPushConstantTokenEXT * pPushConstant;
    const VkIndirectCommandsVertexBufferTokenEXT * pVertexBuffer;
    const VkIndirectCommandsIndexBufferTokenEXT *  pIndexBuffer;
    const VkIndirectCommandsExecutionSetTokenEXT * pExecutionSet;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C union back to this C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkIndirectCommandsTokenDataEXT>
  {
    using Type = IndirectCommandsTokenDataEXT;
  };
#endif

  // wrapper struct for struct VkIndirectCommandsLayoutTokenEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutTokenEXT.html
  // Zero-overhead C++ wrapper; the conversions below rely on this type being layout-identical to the native C struct.
  // Note: no comparison operators are generated for this struct (its union member 'data' cannot be compared member-wise).
  struct IndirectCommandsLayoutTokenEXT
  {
    // The corresponding native C type, for generic wrapper <-> C mapping.
    using NativeType = VkIndirectCommandsLayoutTokenEXT;

    // sType metadata used by structure-chain machinery: this struct may appear at most once in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eIndirectCommandsLayoutTokenEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all parameters are defaulted, so it doubles as the default constructor.
    // sType is fixed by the member initializer and is intentionally not a parameter.
    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT( IndirectCommandsTokenTypeEXT type_   = IndirectCommandsTokenTypeEXT::eExecutionSet,
                                                            IndirectCommandsTokenDataEXT data_   = {},
                                                            uint32_t                     offset_ = {},
                                                            const void *                 pNext_  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , type{ type_ }
      , data{ data_ }
      , offset{ offset_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT( IndirectCommandsLayoutTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because wrapper and C struct share one memory layout.
    IndirectCommandsLayoutTokenEXT( VkIndirectCommandsLayoutTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : IndirectCommandsLayoutTokenEXT( *reinterpret_cast<IndirectCommandsLayoutTokenEXT const *>( &rhs ) )
    {
    }

    IndirectCommandsLayoutTokenEXT & operator=( IndirectCommandsLayoutTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-identity argument as the converting constructor).
    IndirectCommandsLayoutTokenEXT & operator=( VkIndirectCommandsLayoutTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<IndirectCommandsLayoutTokenEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT & setType( IndirectCommandsTokenTypeEXT type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT & setData( IndirectCommandsTokenDataEXT const & data_ ) VULKAN_HPP_NOEXCEPT
    {
      data = data_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (by reference and by pointer), for passing directly to the C API.
    operator VkIndirectCommandsLayoutTokenEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkIndirectCommandsLayoutTokenEXT *>( this );
    }

    operator VkIndirectCommandsLayoutTokenEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkIndirectCommandsLayoutTokenEXT *>( this );
    }

    operator VkIndirectCommandsLayoutTokenEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkIndirectCommandsLayoutTokenEXT *>( this );
    }

    operator VkIndirectCommandsLayoutTokenEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkIndirectCommandsLayoutTokenEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, IndirectCommandsTokenTypeEXT const &, IndirectCommandsTokenDataEXT const &, uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, type, data, offset );
    }
#endif

  public:
    StructureType                sType  = StructureType::eIndirectCommandsLayoutTokenEXT;
    const void *                 pNext  = {};
    IndirectCommandsTokenTypeEXT type   = IndirectCommandsTokenTypeEXT::eExecutionSet;
    IndirectCommandsTokenDataEXT data   = {};
    uint32_t                     offset = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct back to this C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkIndirectCommandsLayoutTokenEXT>
  {
    using Type = IndirectCommandsLayoutTokenEXT;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct that carries that sType.
  template <>
  struct CppType<StructureType, StructureType::eIndirectCommandsLayoutTokenEXT>
  {
    using Type = IndirectCommandsLayoutTokenEXT;
  };

  // wrapper struct for struct VkIndirectCommandsLayoutCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutCreateInfoEXT.html
  // Zero-overhead C++ wrapper; the conversions below rely on this type being layout-identical to the native C struct.
  struct IndirectCommandsLayoutCreateInfoEXT
  {
    // The corresponding native C type, for generic wrapper <-> C mapping.
    using NativeType = VkIndirectCommandsLayoutCreateInfoEXT;

    // sType metadata used by structure-chain machinery: this struct may appear at most once in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eIndirectCommandsLayoutCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all parameters are defaulted, so it doubles as the default constructor.
    // The tokens array is passed as a separate (count, pointer) pair here; see the ArrayProxy overload below.
    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoEXT( IndirectCommandsLayoutUsageFlagsEXT    flags_          = {},
                                                              ShaderStageFlags                       shaderStages_   = {},
                                                              uint32_t                               indirectStride_ = {},
                                                              PipelineLayout                         pipelineLayout_ = {},
                                                              uint32_t                               tokenCount_     = {},
                                                              const IndirectCommandsLayoutTokenEXT * pTokens_        = {},
                                                              const void *                           pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , shaderStages{ shaderStages_ }
      , indirectStride{ indirectStride_ }
      , pipelineLayout{ pipelineLayout_ }
      , tokenCount{ tokenCount_ }
      , pTokens{ pTokens_ }
    {
    }

    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoEXT( IndirectCommandsLayoutCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because wrapper and C struct share one memory layout.
    IndirectCommandsLayoutCreateInfoEXT( VkIndirectCommandsLayoutCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : IndirectCommandsLayoutCreateInfoEXT( *reinterpret_cast<IndirectCommandsLayoutCreateInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives tokenCount/pTokens from an ArrayProxy, keeping count and pointer in sync.
    // The proxy only borrows the caller's array; that array must outlive any use of this struct.
    IndirectCommandsLayoutCreateInfoEXT( IndirectCommandsLayoutUsageFlagsEXT                                   flags_,
                                         ShaderStageFlags                                                      shaderStages_,
                                         uint32_t                                                              indirectStride_,
                                         PipelineLayout                                                        pipelineLayout_,
                                         ArrayProxyNoTemporaries<const IndirectCommandsLayoutTokenEXT> const & tokens_,
                                         const void *                                                          pNext_ = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , shaderStages( shaderStages_ )
      , indirectStride( indirectStride_ )
      , pipelineLayout( pipelineLayout_ )
      , tokenCount( static_cast<uint32_t>( tokens_.size() ) )
      , pTokens( tokens_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    IndirectCommandsLayoutCreateInfoEXT & operator=( IndirectCommandsLayoutCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-identity argument as the converting constructor).
    IndirectCommandsLayoutCreateInfoEXT & operator=( VkIndirectCommandsLayoutCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<IndirectCommandsLayoutCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setFlags( IndirectCommandsLayoutUsageFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setShaderStages( ShaderStageFlags shaderStages_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStages = shaderStages_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setIndirectStride( uint32_t indirectStride_ ) VULKAN_HPP_NOEXCEPT
    {
      indirectStride = indirectStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setPipelineLayout( PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineLayout = pipelineLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT
    {
      tokenCount = tokenCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setPTokens( const IndirectCommandsLayoutTokenEXT * pTokens_ ) VULKAN_HPP_NOEXCEPT
    {
      pTokens = pTokens_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets tokenCount and pTokens together from one ArrayProxy (borrowed, not owned).
    IndirectCommandsLayoutCreateInfoEXT & setTokens( ArrayProxyNoTemporaries<const IndirectCommandsLayoutTokenEXT> const & tokens_ ) VULKAN_HPP_NOEXCEPT
    {
      tokenCount = static_cast<uint32_t>( tokens_.size() );
      pTokens    = tokens_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (by reference and by pointer), for passing directly to the C API.
    operator VkIndirectCommandsLayoutCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoEXT *>( this );
    }

    operator VkIndirectCommandsLayoutCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkIndirectCommandsLayoutCreateInfoEXT *>( this );
    }

    operator VkIndirectCommandsLayoutCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoEXT *>( this );
    }

    operator VkIndirectCommandsLayoutCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkIndirectCommandsLayoutCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used below for member-wise comparison.
    std::tuple<StructureType const &,
               const void * const &,
               IndirectCommandsLayoutUsageFlagsEXT const &,
               ShaderStageFlags const &,
               uint32_t const &,
               PipelineLayout const &,
               uint32_t const &,
               const IndirectCommandsLayoutTokenEXT * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, shaderStages, indirectStride, pipelineLayout, tokenCount, pTokens );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( IndirectCommandsLayoutCreateInfoEXT const & ) const = default;
#else
    // Hand-written member-wise equality when defaulted operator<=> is unavailable.
    // Note that pNext and pTokens are compared as raw pointers, not deeply.
    bool operator==( IndirectCommandsLayoutCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( shaderStages == rhs.shaderStages ) &&
             ( indirectStride == rhs.indirectStride ) && ( pipelineLayout == rhs.pipelineLayout ) && ( tokenCount == rhs.tokenCount ) &&
             ( pTokens == rhs.pTokens );
#  endif
    }

    bool operator!=( IndirectCommandsLayoutCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                          sType          = StructureType::eIndirectCommandsLayoutCreateInfoEXT;
    const void *                           pNext          = {};
    IndirectCommandsLayoutUsageFlagsEXT    flags          = {};
    ShaderStageFlags                       shaderStages   = {};
    uint32_t                               indirectStride = {};
    PipelineLayout                         pipelineLayout = {};
    uint32_t                               tokenCount     = {};
    const IndirectCommandsLayoutTokenEXT * pTokens        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct back to this C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkIndirectCommandsLayoutCreateInfoEXT>
  {
    using Type = IndirectCommandsLayoutCreateInfoEXT;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct that carries that sType.
  template <>
  struct CppType<StructureType, StructureType::eIndirectCommandsLayoutCreateInfoEXT>
  {
    using Type = IndirectCommandsLayoutCreateInfoEXT;
  };

  // wrapper struct for struct VkIndirectCommandsLayoutTokenNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutTokenNV.html
  struct IndirectCommandsLayoutTokenNV
  {
    using NativeType = VkIndirectCommandsLayoutTokenNV;

    // NOTE(review): allowDuplicate = false presumably means at most one instance of this
    // struct may appear in a structure chain — confirm against the StructureChain machinery.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eIndirectCommandsLayoutTokenNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; all defaults zero-initialize except tokenType (eShaderGroup).
    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV( IndirectCommandsTokenTypeNV tokenType_                    = IndirectCommandsTokenTypeNV::eShaderGroup,
                                                        uint32_t                    stream_                       = {},
                                                        uint32_t                    offset_                       = {},
                                                        uint32_t                    vertexBindingUnit_            = {},
                                                        Bool32                      vertexDynamicStride_          = {},
                                                        PipelineLayout              pushconstantPipelineLayout_   = {},
                                                        ShaderStageFlags            pushconstantShaderStageFlags_ = {},
                                                        uint32_t                    pushconstantOffset_           = {},
                                                        uint32_t                    pushconstantSize_             = {},
                                                        IndirectStateFlagsNV        indirectStateFlags_           = {},
                                                        uint32_t                    indexTypeCount_               = {},
                                                        const IndexType *           pIndexTypes_                  = {},
                                                        const uint32_t *            pIndexTypeValues_             = {},
                                                        const void *                pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , tokenType{ tokenType_ }
      , stream{ stream_ }
      , offset{ offset_ }
      , vertexBindingUnit{ vertexBindingUnit_ }
      , vertexDynamicStride{ vertexDynamicStride_ }
      , pushconstantPipelineLayout{ pushconstantPipelineLayout_ }
      , pushconstantShaderStageFlags{ pushconstantShaderStageFlags_ }
      , pushconstantOffset{ pushconstantOffset_ }
      , pushconstantSize{ pushconstantSize_ }
      , indirectStateFlags{ indirectStateFlags_ }
      , indexTypeCount{ indexTypeCount_ }
      , pIndexTypes{ pIndexTypes_ }
      , pIndexTypeValues{ pIndexTypeValues_ }
    {
    }

    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct: valid because this wrapper is layout-compatible
    // with VkIndirectCommandsLayoutTokenNV (member order/types mirror it exactly).
    IndirectCommandsLayoutTokenNV( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : IndirectCommandsLayoutTokenNV( *reinterpret_cast<IndirectCommandsLayoutTokenNV const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives indexTypeCount / pIndexTypes / pIndexTypeValues
    // from the array proxies. The two arrays share one count, so their sizes must match:
    // throws LogicError (or asserts under VULKAN_HPP_NO_EXCEPTIONS) otherwise.
    IndirectCommandsLayoutTokenNV( IndirectCommandsTokenTypeNV                      tokenType_,
                                   uint32_t                                         stream_,
                                   uint32_t                                         offset_,
                                   uint32_t                                         vertexBindingUnit_,
                                   Bool32                                           vertexDynamicStride_,
                                   PipelineLayout                                   pushconstantPipelineLayout_,
                                   ShaderStageFlags                                 pushconstantShaderStageFlags_,
                                   uint32_t                                         pushconstantOffset_,
                                   uint32_t                                         pushconstantSize_,
                                   IndirectStateFlagsNV                             indirectStateFlags_,
                                   ArrayProxyNoTemporaries<const IndexType> const & indexTypes_,
                                   ArrayProxyNoTemporaries<const uint32_t> const &  indexTypeValues_ = {},
                                   const void *                                     pNext_           = nullptr )
      : pNext( pNext_ )
      , tokenType( tokenType_ )
      , stream( stream_ )
      , offset( offset_ )
      , vertexBindingUnit( vertexBindingUnit_ )
      , vertexDynamicStride( vertexDynamicStride_ )
      , pushconstantPipelineLayout( pushconstantPipelineLayout_ )
      , pushconstantShaderStageFlags( pushconstantShaderStageFlags_ )
      , pushconstantOffset( pushconstantOffset_ )
      , pushconstantSize( pushconstantSize_ )
      , indirectStateFlags( indirectStateFlags_ )
      , indexTypeCount( static_cast<uint32_t>( indexTypes_.size() ) )
      , pIndexTypes( indexTypes_.data() )
      , pIndexTypeValues( indexTypeValues_.data() )
    {
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( indexTypes_.size() == indexTypeValues_.size() );
#    else
      if ( indexTypes_.size() != indexTypeValues_.size() )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                          "::IndirectCommandsLayoutTokenNV::IndirectCommandsLayoutTokenNV: indexTypes_.size() != indexTypeValues_.size()" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    IndirectCommandsLayoutTokenNV & operator=( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, again via the layout-compatible reinterpret_cast.
    IndirectCommandsLayoutTokenNV & operator=( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<IndirectCommandsLayoutTokenNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable per-member setters (builder style): each returns *this.
    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setTokenType( IndirectCommandsTokenTypeNV tokenType_ ) VULKAN_HPP_NOEXCEPT
    {
      tokenType = tokenType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setStream( uint32_t stream_ ) VULKAN_HPP_NOEXCEPT
    {
      stream = stream_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setVertexBindingUnit( uint32_t vertexBindingUnit_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexBindingUnit = vertexBindingUnit_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setVertexDynamicStride( Bool32 vertexDynamicStride_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexDynamicStride = vertexDynamicStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantPipelineLayout( PipelineLayout pushconstantPipelineLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      pushconstantPipelineLayout = pushconstantPipelineLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV &
      setPushconstantShaderStageFlags( ShaderStageFlags pushconstantShaderStageFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      pushconstantShaderStageFlags = pushconstantShaderStageFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantOffset( uint32_t pushconstantOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      pushconstantOffset = pushconstantOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantSize( uint32_t pushconstantSize_ ) VULKAN_HPP_NOEXCEPT
    {
      pushconstantSize = pushconstantSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setIndirectStateFlags( IndirectStateFlagsNV indirectStateFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      indirectStateFlags = indirectStateFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setIndexTypeCount( uint32_t indexTypeCount_ ) VULKAN_HPP_NOEXCEPT
    {
      indexTypeCount = indexTypeCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPIndexTypes( const IndexType * pIndexTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      pIndexTypes = pIndexTypes_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both indexTypeCount and pIndexTypes from a single array proxy.
    IndirectCommandsLayoutTokenNV & setIndexTypes( ArrayProxyNoTemporaries<const IndexType> const & indexTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      indexTypeCount = static_cast<uint32_t>( indexTypes_.size() );
      pIndexTypes    = indexTypes_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPIndexTypeValues( const uint32_t * pIndexTypeValues_ ) VULKAN_HPP_NOEXCEPT
    {
      pIndexTypeValues = pIndexTypeValues_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Note: also overwrites indexTypeCount, since pIndexTypes and pIndexTypeValues share that count.
    IndirectCommandsLayoutTokenNV & setIndexTypeValues( ArrayProxyNoTemporaries<const uint32_t> const & indexTypeValues_ ) VULKAN_HPP_NOEXCEPT
    {
      indexTypeCount   = static_cast<uint32_t>( indexTypeValues_.size() );
      pIndexTypeValues = indexTypeValues_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit, zero-cost conversions to the native C type (reference and pointer forms).
    operator VkIndirectCommandsLayoutTokenNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkIndirectCommandsLayoutTokenNV *>( this );
    }

    operator VkIndirectCommandsLayoutTokenNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkIndirectCommandsLayoutTokenNV *>( this );
    }

    operator VkIndirectCommandsLayoutTokenNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkIndirectCommandsLayoutTokenNV *>( this );
    }

    operator VkIndirectCommandsLayoutTokenNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkIndirectCommandsLayoutTokenNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to every member, in declaration order (used by operator==).
    std::tuple<StructureType const &,
               const void * const &,
               IndirectCommandsTokenTypeNV const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               Bool32 const &,
               PipelineLayout const &,
               ShaderStageFlags const &,
               uint32_t const &,
               uint32_t const &,
               IndirectStateFlagsNV const &,
               uint32_t const &,
               const IndexType * const &,
               const uint32_t * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       tokenType,
                       stream,
                       offset,
                       vertexBindingUnit,
                       vertexDynamicStride,
                       pushconstantPipelineLayout,
                       pushconstantShaderStageFlags,
                       pushconstantOffset,
                       pushconstantSize,
                       indirectStateFlags,
                       indexTypeCount,
                       pIndexTypes,
                       pIndexTypeValues );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( IndirectCommandsLayoutTokenNV const & ) const = default;
#else
    bool operator==( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      // pNext, pIndexTypes and pIndexTypeValues are compared by address, not pointed-to contents.
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tokenType == rhs.tokenType ) && ( stream == rhs.stream ) && ( offset == rhs.offset ) &&
             ( vertexBindingUnit == rhs.vertexBindingUnit ) && ( vertexDynamicStride == rhs.vertexDynamicStride ) &&
             ( pushconstantPipelineLayout == rhs.pushconstantPipelineLayout ) && ( pushconstantShaderStageFlags == rhs.pushconstantShaderStageFlags ) &&
             ( pushconstantOffset == rhs.pushconstantOffset ) && ( pushconstantSize == rhs.pushconstantSize ) &&
             ( indirectStateFlags == rhs.indirectStateFlags ) && ( indexTypeCount == rhs.indexTypeCount ) && ( pIndexTypes == rhs.pIndexTypes ) &&
             ( pIndexTypeValues == rhs.pIndexTypeValues );
#  endif
    }

    bool operator!=( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkIndirectCommandsLayoutTokenNV exactly; order and types must not change,
    // since all conversions to/from the C struct are reinterpret_casts.
    StructureType               sType                        = StructureType::eIndirectCommandsLayoutTokenNV;
    const void *                pNext                        = {};
    IndirectCommandsTokenTypeNV tokenType                    = IndirectCommandsTokenTypeNV::eShaderGroup;
    uint32_t                    stream                       = {};
    uint32_t                    offset                       = {};
    uint32_t                    vertexBindingUnit            = {};
    Bool32                      vertexDynamicStride          = {};
    PipelineLayout              pushconstantPipelineLayout   = {};
    ShaderStageFlags            pushconstantShaderStageFlags = {};
    uint32_t                    pushconstantOffset           = {};
    uint32_t                    pushconstantSize             = {};
    IndirectStateFlagsNV        indirectStateFlags           = {};
    uint32_t                    indexTypeCount               = {};
    const IndexType *           pIndexTypes                  = {};
    const uint32_t *            pIndexTypeValues             = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct type to its C++ wrapper (C++20 and up only).
  template <>
  struct CppType<VkIndirectCommandsLayoutTokenNV>
  {
    using Type = IndirectCommandsLayoutTokenNV;
  };
#endif

  // Trait mapping the StructureType enumerant to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::eIndirectCommandsLayoutTokenNV>
  {
    using Type = IndirectCommandsLayoutTokenNV;
  };

  // wrapper struct for struct VkIndirectCommandsLayoutCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutCreateInfoNV.html
  struct IndirectCommandsLayoutCreateInfoNV
  {
    using NativeType = VkIndirectCommandsLayoutCreateInfoNV;

    // NOTE(review): allowDuplicate = false presumably means at most one instance of this
    // struct may appear in a structure chain — confirm against the StructureChain machinery.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eIndirectCommandsLayoutCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; all defaults zero-initialize except pipelineBindPoint (eGraphics).
    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( IndirectCommandsLayoutUsageFlagsNV    flags_             = {},
                                                             PipelineBindPoint                     pipelineBindPoint_ = PipelineBindPoint::eGraphics,
                                                             uint32_t                              tokenCount_        = {},
                                                             const IndirectCommandsLayoutTokenNV * pTokens_           = {},
                                                             uint32_t                              streamCount_       = {},
                                                             const uint32_t *                      pStreamStrides_    = {},
                                                             const void *                          pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , pipelineBindPoint{ pipelineBindPoint_ }
      , tokenCount{ tokenCount_ }
      , pTokens{ pTokens_ }
      , streamCount{ streamCount_ }
      , pStreamStrides{ pStreamStrides_ }
    {
    }

    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct: valid because this wrapper is layout-compatible
    // with VkIndirectCommandsLayoutCreateInfoNV.
    IndirectCommandsLayoutCreateInfoNV( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : IndirectCommandsLayoutCreateInfoNV( *reinterpret_cast<IndirectCommandsLayoutCreateInfoNV const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives tokenCount/pTokens and streamCount/pStreamStrides
    // from the array proxies (the two arrays have independent counts here).
    IndirectCommandsLayoutCreateInfoNV( IndirectCommandsLayoutUsageFlagsNV                                   flags_,
                                        PipelineBindPoint                                                    pipelineBindPoint_,
                                        ArrayProxyNoTemporaries<const IndirectCommandsLayoutTokenNV> const & tokens_,
                                        ArrayProxyNoTemporaries<const uint32_t> const &                      streamStrides_ = {},
                                        const void *                                                         pNext_         = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , pipelineBindPoint( pipelineBindPoint_ )
      , tokenCount( static_cast<uint32_t>( tokens_.size() ) )
      , pTokens( tokens_.data() )
      , streamCount( static_cast<uint32_t>( streamStrides_.size() ) )
      , pStreamStrides( streamStrides_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    IndirectCommandsLayoutCreateInfoNV & operator=( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, again via the layout-compatible reinterpret_cast.
    IndirectCommandsLayoutCreateInfoNV & operator=( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<IndirectCommandsLayoutCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable per-member setters (builder style): each returns *this.
    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setFlags( IndirectCommandsLayoutUsageFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineBindPoint = pipelineBindPoint_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT
    {
      tokenCount = tokenCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPTokens( const IndirectCommandsLayoutTokenNV * pTokens_ ) VULKAN_HPP_NOEXCEPT
    {
      pTokens = pTokens_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both tokenCount and pTokens from a single array proxy.
    IndirectCommandsLayoutCreateInfoNV & setTokens( ArrayProxyNoTemporaries<const IndirectCommandsLayoutTokenNV> const & tokens_ ) VULKAN_HPP_NOEXCEPT
    {
      tokenCount = static_cast<uint32_t>( tokens_.size() );
      pTokens    = tokens_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT
    {
      streamCount = streamCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPStreamStrides( const uint32_t * pStreamStrides_ ) VULKAN_HPP_NOEXCEPT
    {
      pStreamStrides = pStreamStrides_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both streamCount and pStreamStrides from a single array proxy.
    IndirectCommandsLayoutCreateInfoNV & setStreamStrides( ArrayProxyNoTemporaries<const uint32_t> const & streamStrides_ ) VULKAN_HPP_NOEXCEPT
    {
      streamCount    = static_cast<uint32_t>( streamStrides_.size() );
      pStreamStrides = streamStrides_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit, zero-cost conversions to the native C type (reference and pointer forms).
    operator VkIndirectCommandsLayoutCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( this );
    }

    operator VkIndirectCommandsLayoutCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkIndirectCommandsLayoutCreateInfoNV *>( this );
    }

    operator VkIndirectCommandsLayoutCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( this );
    }

    operator VkIndirectCommandsLayoutCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkIndirectCommandsLayoutCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to every member, in declaration order (used by operator==).
    std::tuple<StructureType const &,
               const void * const &,
               IndirectCommandsLayoutUsageFlagsNV const &,
               PipelineBindPoint const &,
               uint32_t const &,
               const IndirectCommandsLayoutTokenNV * const &,
               uint32_t const &,
               const uint32_t * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, pipelineBindPoint, tokenCount, pTokens, streamCount, pStreamStrides );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( IndirectCommandsLayoutCreateInfoNV const & ) const = default;
#else
    bool operator==( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      // pNext, pTokens and pStreamStrides are compared by address, not pointed-to contents.
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) &&
             ( tokenCount == rhs.tokenCount ) && ( pTokens == rhs.pTokens ) && ( streamCount == rhs.streamCount ) && ( pStreamStrides == rhs.pStreamStrides );
#  endif
    }

    bool operator!=( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkIndirectCommandsLayoutCreateInfoNV exactly; order and types must not change,
    // since all conversions to/from the C struct are reinterpret_casts.
    StructureType                         sType             = StructureType::eIndirectCommandsLayoutCreateInfoNV;
    const void *                          pNext             = {};
    IndirectCommandsLayoutUsageFlagsNV    flags             = {};
    PipelineBindPoint                     pipelineBindPoint = PipelineBindPoint::eGraphics;
    uint32_t                              tokenCount        = {};
    const IndirectCommandsLayoutTokenNV * pTokens           = {};
    uint32_t                              streamCount       = {};
    const uint32_t *                      pStreamStrides    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct type to its C++ wrapper (C++20 and up only).
  template <>
  struct CppType<VkIndirectCommandsLayoutCreateInfoNV>
  {
    using Type = IndirectCommandsLayoutCreateInfoNV;
  };
#endif

  // Trait mapping the StructureType enumerant to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::eIndirectCommandsLayoutCreateInfoNV>
  {
    using Type = IndirectCommandsLayoutCreateInfoNV;
  };

  // wrapper struct for struct VkIndirectExecutionSetPipelineInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectExecutionSetPipelineInfoEXT.html
  struct IndirectExecutionSetPipelineInfoEXT
  {
    using NativeType = VkIndirectExecutionSetPipelineInfoEXT;

    // NOTE(review): allowDuplicate = false presumably means at most one instance of this
    // struct may appear in a structure chain — confirm against the StructureChain machinery.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eIndirectExecutionSetPipelineInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; defaults zero-initialize all members.
    VULKAN_HPP_CONSTEXPR
      IndirectExecutionSetPipelineInfoEXT( Pipeline initialPipeline_ = {}, uint32_t maxPipelineCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , initialPipeline{ initialPipeline_ }
      , maxPipelineCount{ maxPipelineCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR IndirectExecutionSetPipelineInfoEXT( IndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct: valid because this wrapper is layout-compatible
    // with VkIndirectExecutionSetPipelineInfoEXT.
    IndirectExecutionSetPipelineInfoEXT( VkIndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : IndirectExecutionSetPipelineInfoEXT( *reinterpret_cast<IndirectExecutionSetPipelineInfoEXT const *>( &rhs ) )
    {
    }

    IndirectExecutionSetPipelineInfoEXT & operator=( IndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, again via the layout-compatible reinterpret_cast.
    IndirectExecutionSetPipelineInfoEXT & operator=( VkIndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<IndirectExecutionSetPipelineInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable per-member setters (builder style): each returns *this.
    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT & setInitialPipeline( Pipeline initialPipeline_ ) VULKAN_HPP_NOEXCEPT
    {
      initialPipeline = initialPipeline_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT & setMaxPipelineCount( uint32_t maxPipelineCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxPipelineCount = maxPipelineCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit, zero-cost conversions to the native C type (reference and pointer forms).
    operator VkIndirectExecutionSetPipelineInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkIndirectExecutionSetPipelineInfoEXT *>( this );
    }

    operator VkIndirectExecutionSetPipelineInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkIndirectExecutionSetPipelineInfoEXT *>( this );
    }

    operator VkIndirectExecutionSetPipelineInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkIndirectExecutionSetPipelineInfoEXT *>( this );
    }

    operator VkIndirectExecutionSetPipelineInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkIndirectExecutionSetPipelineInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to every member, in declaration order (used by operator==).
    std::tuple<StructureType const &, const void * const &, Pipeline const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, initialPipeline, maxPipelineCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( IndirectExecutionSetPipelineInfoEXT const & ) const = default;
#else
    bool operator==( IndirectExecutionSetPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      // pNext is compared by address, not pointed-to contents.
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( initialPipeline == rhs.initialPipeline ) && ( maxPipelineCount == rhs.maxPipelineCount );
#  endif
    }

    bool operator!=( IndirectExecutionSetPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkIndirectExecutionSetPipelineInfoEXT exactly; order and types must not change,
    // since all conversions to/from the C struct are reinterpret_casts.
    StructureType sType            = StructureType::eIndirectExecutionSetPipelineInfoEXT;
    const void *  pNext            = {};
    Pipeline      initialPipeline  = {};
    uint32_t      maxPipelineCount = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct type to its C++ wrapper (C++20 and up only).
  template <>
  struct CppType<VkIndirectExecutionSetPipelineInfoEXT>
  {
    using Type = IndirectExecutionSetPipelineInfoEXT;
  };
#endif

  // Trait mapping the StructureType enumerant to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::eIndirectExecutionSetPipelineInfoEXT>
  {
    using Type = IndirectExecutionSetPipelineInfoEXT;
  };

  // wrapper struct for struct VkIndirectExecutionSetShaderLayoutInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectExecutionSetShaderLayoutInfoEXT.html
  struct IndirectExecutionSetShaderLayoutInfoEXT
  {
    using NativeType = VkIndirectExecutionSetShaderLayoutInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eIndirectExecutionSetShaderLayoutInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderLayoutInfoEXT( uint32_t                    setLayoutCount_ = {},
                                                                  const DescriptorSetLayout * pSetLayouts_    = {},
                                                                  const void *                pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , setLayoutCount{ setLayoutCount_ }
      , pSetLayouts{ pSetLayouts_ }
    {
    }

    VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderLayoutInfoEXT( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    IndirectExecutionSetShaderLayoutInfoEXT( VkIndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : IndirectExecutionSetShaderLayoutInfoEXT( *reinterpret_cast<IndirectExecutionSetShaderLayoutInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    IndirectExecutionSetShaderLayoutInfoEXT( ArrayProxyNoTemporaries<const DescriptorSetLayout> const & setLayouts_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), setLayoutCount( static_cast<uint32_t>( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    IndirectExecutionSetShaderLayoutInfoEXT & operator=( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    IndirectExecutionSetShaderLayoutInfoEXT & operator=( VkIndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<IndirectExecutionSetShaderLayoutInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT
    {
      setLayoutCount = setLayoutCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & setPSetLayouts( const DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      pSetLayouts = pSetLayouts_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    IndirectExecutionSetShaderLayoutInfoEXT & setSetLayouts( ArrayProxyNoTemporaries<const DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      setLayoutCount = static_cast<uint32_t>( setLayouts_.size() );
      pSetLayouts    = setLayouts_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversion to a const reference of the native C struct (layout-compatible reinterpretation).
    operator VkIndirectExecutionSetShaderLayoutInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkIndirectExecutionSetShaderLayoutInfoEXT *>( this );
    }

    // Implicit conversion to a mutable reference of the native C struct.
    operator VkIndirectExecutionSetShaderLayoutInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkIndirectExecutionSetShaderLayoutInfoEXT *>( this );
    }

    // Implicit conversion to a const pointer of the native C struct.
    operator VkIndirectExecutionSetShaderLayoutInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkIndirectExecutionSetShaderLayoutInfoEXT *>( this );
    }

    // Implicit conversion to a mutable pointer of the native C struct.
    operator VkIndirectExecutionSetShaderLayoutInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkIndirectExecutionSetShaderLayoutInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order; used for generic comparison.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const DescriptorSetLayout * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, setLayoutCount, pSetLayouts );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( IndirectExecutionSetShaderLayoutInfoEXT const & ) const = default;
#else
    // Member-wise equality (pre-C++20 fallback); pSetLayouts compares by pointer value, not array contents.
    bool operator==( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( setLayoutCount == rhs.setLayoutCount ) && ( pSetLayouts == rhs.pSetLayouts );
#  endif
    }

    // Negation of the member-wise equality above.
    bool operator!=( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType               sType          = StructureType::eIndirectExecutionSetShaderLayoutInfoEXT;
    const void *                pNext          = {};
    uint32_t                    setLayoutCount = {};
    const DescriptorSetLayout * pSetLayouts    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (used by generic code when C++20 is available).
  template <>
  struct CppType<VkIndirectExecutionSetShaderLayoutInfoEXT>
  {
    using Type = IndirectExecutionSetShaderLayoutInfoEXT;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type (used by structure-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::eIndirectExecutionSetShaderLayoutInfoEXT>
  {
    using Type = IndirectExecutionSetShaderLayoutInfoEXT;
  };

  // wrapper struct for struct VkIndirectExecutionSetShaderInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectExecutionSetShaderInfoEXT.html
  struct IndirectExecutionSetShaderInfoEXT
  {
    using NativeType = VkIndirectExecutionSetShaderInfoEXT;

    // sType value for this struct; it may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eIndirectExecutionSetShaderInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by the member initializer below, pNext comes last with a null default.
    VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderInfoEXT( uint32_t                                        shaderCount_            = {},
                                                            const ShaderEXT *                               pInitialShaders_        = {},
                                                            const IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos_        = {},
                                                            uint32_t                                        maxShaderCount_         = {},
                                                            uint32_t                                        pushConstantRangeCount_ = {},
                                                            const PushConstantRange *                       pPushConstantRanges_    = {},
                                                            const void *                                    pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderCount{ shaderCount_ }
      , pInitialShaders{ pInitialShaders_ }
      , pSetLayoutInfos{ pSetLayoutInfos_ }
      , maxShaderCount{ maxShaderCount_ }
      , pushConstantRangeCount{ pushConstantRangeCount_ }
      , pPushConstantRanges{ pPushConstantRanges_ }
    {
    }

    VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderInfoEXT( IndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-compatible with VkIndirectExecutionSetShaderInfoEXT.
    IndirectExecutionSetShaderInfoEXT( VkIndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : IndirectExecutionSetShaderInfoEXT( *reinterpret_cast<IndirectExecutionSetShaderInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: counts are derived from the array proxies (non-owning views).
    // shaderCount sizes both pInitialShaders and pSetLayoutInfos, so setLayoutInfos_, when non-empty,
    // must have exactly as many elements as initialShaders_ (checked below).
    IndirectExecutionSetShaderInfoEXT( ArrayProxyNoTemporaries<const ShaderEXT> const &                               initialShaders_,
                                       ArrayProxyNoTemporaries<const IndirectExecutionSetShaderLayoutInfoEXT> const & setLayoutInfos_     = {},
                                       uint32_t                                                                       maxShaderCount_     = {},
                                       ArrayProxyNoTemporaries<const PushConstantRange> const &                       pushConstantRanges_ = {},
                                       const void *                                                                   pNext_              = nullptr )
      : pNext( pNext_ )
      , shaderCount( static_cast<uint32_t>( initialShaders_.size() ) )
      , pInitialShaders( initialShaders_.data() )
      , pSetLayoutInfos( setLayoutInfos_.data() )
      , maxShaderCount( maxShaderCount_ )
      , pushConstantRangeCount( static_cast<uint32_t>( pushConstantRanges_.size() ) )
      , pPushConstantRanges( pushConstantRanges_.data() )
    {
      // Enforce the shared-length invariant: assert when exceptions are disabled, throw otherwise.
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( setLayoutInfos_.empty() || ( initialShaders_.size() == setLayoutInfos_.size() ) );
#    else
      if ( !setLayoutInfos_.empty() && ( initialShaders_.size() != setLayoutInfos_.size() ) )
      {
        throw LogicError(
          VULKAN_HPP_NAMESPACE_STRING
          "::IndirectExecutionSetShaderInfoEXT::IndirectExecutionSetShaderInfoEXT: !setLayoutInfos_.empty() && ( initialShaders_.size() != setLayoutInfos_.size() )" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    IndirectExecutionSetShaderInfoEXT & operator=( IndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpretation).
    IndirectExecutionSetShaderInfoEXT & operator=( VkIndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<IndirectExecutionSetShaderInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setShaderCount( uint32_t shaderCount_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderCount = shaderCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPInitialShaders( const ShaderEXT * pInitialShaders_ ) VULKAN_HPP_NOEXCEPT
    {
      pInitialShaders = pInitialShaders_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets pInitialShaders and derives shaderCount from the proxy's size.
    IndirectExecutionSetShaderInfoEXT & setInitialShaders( ArrayProxyNoTemporaries<const ShaderEXT> const & initialShaders_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderCount     = static_cast<uint32_t>( initialShaders_.size() );
      pInitialShaders = initialShaders_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT &
      setPSetLayoutInfos( const IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      pSetLayoutInfos = pSetLayoutInfos_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets pSetLayoutInfos and updates shaderCount from the proxy's size: both pInitialShaders and
    // pSetLayoutInfos are sized by shaderCount (see the size check in the array-proxy constructor above).
    IndirectExecutionSetShaderInfoEXT &
      setSetLayoutInfos( ArrayProxyNoTemporaries<const IndirectExecutionSetShaderLayoutInfoEXT> const & setLayoutInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderCount     = static_cast<uint32_t>( setLayoutInfos_.size() );
      pSetLayoutInfos = setLayoutInfos_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setMaxShaderCount( uint32_t maxShaderCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxShaderCount = maxShaderCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT
    {
      pushConstantRangeCount = pushConstantRangeCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPPushConstantRanges( const PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
    {
      pPushConstantRanges = pPushConstantRanges_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets pPushConstantRanges and derives pushConstantRangeCount from the proxy's size.
    IndirectExecutionSetShaderInfoEXT &
      setPushConstantRanges( ArrayProxyNoTemporaries<const PushConstantRange> const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
    {
      pushConstantRangeCount = static_cast<uint32_t>( pushConstantRanges_.size() );
      pPushConstantRanges    = pushConstantRanges_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the native C struct, by const/mutable reference and pointer (layout-compatible reinterpretation).
    operator VkIndirectExecutionSetShaderInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkIndirectExecutionSetShaderInfoEXT *>( this );
    }

    operator VkIndirectExecutionSetShaderInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkIndirectExecutionSetShaderInfoEXT *>( this );
    }

    operator VkIndirectExecutionSetShaderInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkIndirectExecutionSetShaderInfoEXT *>( this );
    }

    operator VkIndirectExecutionSetShaderInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkIndirectExecutionSetShaderInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for generic comparison.
    std::tuple<StructureType const &,
               const void * const &,
               uint32_t const &,
               const ShaderEXT * const &,
               const IndirectExecutionSetShaderLayoutInfoEXT * const &,
               uint32_t const &,
               uint32_t const &,
               const PushConstantRange * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderCount, pInitialShaders, pSetLayoutInfos, maxShaderCount, pushConstantRangeCount, pPushConstantRanges );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted member-wise comparison; array pointers compare by address, not by contents.
    auto operator<=>( IndirectExecutionSetShaderInfoEXT const & ) const = default;
#else
    bool operator==( IndirectExecutionSetShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCount == rhs.shaderCount ) && ( pInitialShaders == rhs.pInitialShaders ) &&
             ( pSetLayoutInfos == rhs.pSetLayoutInfos ) && ( maxShaderCount == rhs.maxShaderCount ) &&
             ( pushConstantRangeCount == rhs.pushConstantRangeCount ) && ( pPushConstantRanges == rhs.pPushConstantRanges );
#  endif
    }

    bool operator!=( IndirectExecutionSetShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkIndirectExecutionSetShaderInfoEXT in declaration order; do not reorder (ABI contract).
    StructureType                                   sType                  = StructureType::eIndirectExecutionSetShaderInfoEXT;
    const void *                                    pNext                  = {};
    uint32_t                                        shaderCount            = {};
    const ShaderEXT *                               pInitialShaders        = {};
    const IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos        = {};
    uint32_t                                        maxShaderCount         = {};
    uint32_t                                        pushConstantRangeCount = {};
    const PushConstantRange *                       pPushConstantRanges    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (used by generic code when C++20 is available).
  template <>
  struct CppType<VkIndirectExecutionSetShaderInfoEXT>
  {
    using Type = IndirectExecutionSetShaderInfoEXT;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type (used by structure-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::eIndirectExecutionSetShaderInfoEXT>
  {
    using Type = IndirectExecutionSetShaderInfoEXT;
  };

  // Wrapper union for VkIndirectExecutionSetInfoEXT: holds either pipeline info or shader info.
  // Which member is active is not tracked here; it is selected externally (presumably via
  // IndirectExecutionSetCreateInfoEXT::type — see that struct below).
  union IndirectExecutionSetInfoEXT
  {
    using NativeType = VkIndirectExecutionSetInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )

    // Default/pipeline constructor: activates pPipelineInfo.
    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT( const IndirectExecutionSetPipelineInfoEXT * pPipelineInfo_ = {} ) : pPipelineInfo( pPipelineInfo_ ) {}

    // Shader constructor: activates pShaderInfo.
    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT( const IndirectExecutionSetShaderInfoEXT * pShaderInfo_ ) : pShaderInfo( pShaderInfo_ ) {}
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
    // Fluent setters; assigning one member makes it the active one.
    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT & setPPipelineInfo( const IndirectExecutionSetPipelineInfoEXT * pPipelineInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pPipelineInfo = pPipelineInfo_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT & setPShaderInfo( const IndirectExecutionSetShaderInfoEXT * pShaderInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pShaderInfo = pShaderInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the native C union (layout-compatible reinterpretation).
    operator VkIndirectExecutionSetInfoEXT const &() const
    {
      return *reinterpret_cast<const VkIndirectExecutionSetInfoEXT *>( this );
    }

    operator VkIndirectExecutionSetInfoEXT &()
    {
      return *reinterpret_cast<VkIndirectExecutionSetInfoEXT *>( this );
    }

#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
    const IndirectExecutionSetPipelineInfoEXT * pPipelineInfo;
    const IndirectExecutionSetShaderInfoEXT *   pShaderInfo;
#else
    // Without unrestricted unions, fall back to the native pointer types (same layout).
    const VkIndirectExecutionSetPipelineInfoEXT * pPipelineInfo;
    const VkIndirectExecutionSetShaderInfoEXT *   pShaderInfo;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C union to its C++ wrapper type (used by generic code when C++20 is available).
  template <>
  struct CppType<VkIndirectExecutionSetInfoEXT>
  {
    using Type = IndirectExecutionSetInfoEXT;
  };
#endif

  // wrapper struct for struct VkIndirectExecutionSetCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectExecutionSetCreateInfoEXT.html
  struct IndirectExecutionSetCreateInfoEXT
  {
    using NativeType = VkIndirectExecutionSetCreateInfoEXT;

    // sType value for this struct; it may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eIndirectExecutionSetCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; `type` selects which member of the `info` union is meaningful.
    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT( IndirectExecutionSetInfoTypeEXT type_  = IndirectExecutionSetInfoTypeEXT::ePipelines,
                                                               IndirectExecutionSetInfoEXT     info_  = {},
                                                               const void *                    pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , type{ type_ }
      , info{ info_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT( IndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (layout-compatible reinterpretation).
    IndirectExecutionSetCreateInfoEXT( VkIndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : IndirectExecutionSetCreateInfoEXT( *reinterpret_cast<IndirectExecutionSetCreateInfoEXT const *>( &rhs ) )
    {
    }

    IndirectExecutionSetCreateInfoEXT & operator=( IndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpretation).
    IndirectExecutionSetCreateInfoEXT & operator=( VkIndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<IndirectExecutionSetCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setType( IndirectExecutionSetInfoTypeEXT type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setInfo( IndirectExecutionSetInfoEXT const & info_ ) VULKAN_HPP_NOEXCEPT
    {
      info = info_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the native C struct, by const/mutable reference and pointer.
    operator VkIndirectExecutionSetCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkIndirectExecutionSetCreateInfoEXT *>( this );
    }

    operator VkIndirectExecutionSetCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkIndirectExecutionSetCreateInfoEXT *>( this );
    }

    operator VkIndirectExecutionSetCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkIndirectExecutionSetCreateInfoEXT *>( this );
    }

    operator VkIndirectExecutionSetCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkIndirectExecutionSetCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order.
    // Note: no ==/<=> operators are provided for this struct, since the `info` union cannot be compared member-wise.
    std::tuple<StructureType const &, const void * const &, IndirectExecutionSetInfoTypeEXT const &, IndirectExecutionSetInfoEXT const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, type, info );
    }
#endif

  public:
    // Members mirror VkIndirectExecutionSetCreateInfoEXT in declaration order; do not reorder (ABI contract).
    StructureType                   sType = StructureType::eIndirectExecutionSetCreateInfoEXT;
    const void *                    pNext = {};
    IndirectExecutionSetInfoTypeEXT type  = IndirectExecutionSetInfoTypeEXT::ePipelines;
    IndirectExecutionSetInfoEXT     info  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (used by generic code when C++20 is available).
  template <>
  struct CppType<VkIndirectExecutionSetCreateInfoEXT>
  {
    using Type = IndirectExecutionSetCreateInfoEXT;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type (used by structure-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::eIndirectExecutionSetCreateInfoEXT>
  {
    using Type = IndirectExecutionSetCreateInfoEXT;
  };

  // wrapper struct for struct VkInitializePerformanceApiInfoINTEL, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkInitializePerformanceApiInfoINTEL.html
  struct InitializePerformanceApiInfoINTEL
  {
    using NativeType = VkInitializePerformanceApiInfoINTEL;

    // sType value for this struct; it may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eInitializePerformanceApiInfoINTEL;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pUserData is an opaque pointer passed through to the driver untouched.
    VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( void * pUserData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pUserData{ pUserData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (layout-compatible reinterpretation).
    InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
      : InitializePerformanceApiInfoINTEL( *reinterpret_cast<InitializePerformanceApiInfoINTEL const *>( &rhs ) )
    {
    }

    InitializePerformanceApiInfoINTEL & operator=( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpretation).
    InitializePerformanceApiInfoINTEL & operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<InitializePerformanceApiInfoINTEL const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
    {
      pUserData = pUserData_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the native C struct, by const/mutable reference and pointer.
    operator VkInitializePerformanceApiInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( this );
    }

    operator VkInitializePerformanceApiInfoINTEL &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkInitializePerformanceApiInfoINTEL *>( this );
    }

    operator VkInitializePerformanceApiInfoINTEL const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( this );
    }

    operator VkInitializePerformanceApiInfoINTEL *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkInitializePerformanceApiInfoINTEL *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for generic comparison.
    std::tuple<StructureType const &, const void * const &, void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pUserData );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted member-wise comparison; pUserData compares by pointer value.
    auto operator<=>( InitializePerformanceApiInfoINTEL const & ) const = default;
#else
    bool operator==( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pUserData == rhs.pUserData );
#  endif
    }

    bool operator!=( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkInitializePerformanceApiInfoINTEL in declaration order; do not reorder (ABI contract).
    StructureType sType     = StructureType::eInitializePerformanceApiInfoINTEL;
    const void *  pNext     = {};
    void *        pUserData = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (used by generic code when C++20 is available).
  template <>
  struct CppType<VkInitializePerformanceApiInfoINTEL>
  {
    using Type = InitializePerformanceApiInfoINTEL;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type (used by structure-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::eInitializePerformanceApiInfoINTEL>
  {
    using Type = InitializePerformanceApiInfoINTEL;
  };

  // wrapper struct for struct VkInputAttachmentAspectReference, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkInputAttachmentAspectReference.html
  // Plain (non-extensible) struct: no sType/pNext members, so no structureType constant either.
  struct InputAttachmentAspectReference
  {
    using NativeType = VkInputAttachmentAspectReference;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor, all members value-initialized by default.
    VULKAN_HPP_CONSTEXPR
      InputAttachmentAspectReference( uint32_t subpass_ = {}, uint32_t inputAttachmentIndex_ = {}, ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT
      : subpass{ subpass_ }
      , inputAttachmentIndex{ inputAttachmentIndex_ }
      , aspectMask{ aspectMask_ }
    {
    }

    VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (layout-compatible reinterpretation).
    InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
      : InputAttachmentAspectReference( *reinterpret_cast<InputAttachmentAspectReference const *>( &rhs ) )
    {
    }

    InputAttachmentAspectReference & operator=( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpretation).
    InputAttachmentAspectReference & operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<InputAttachmentAspectReference const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
    {
      subpass = subpass_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      inputAttachmentIndex = inputAttachmentIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setAspectMask( ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
    {
      aspectMask = aspectMask_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the native C struct, by const/mutable reference and pointer.
    operator VkInputAttachmentAspectReference const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkInputAttachmentAspectReference *>( this );
    }

    operator VkInputAttachmentAspectReference &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkInputAttachmentAspectReference *>( this );
    }

    operator VkInputAttachmentAspectReference const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkInputAttachmentAspectReference *>( this );
    }

    operator VkInputAttachmentAspectReference *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkInputAttachmentAspectReference *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for generic comparison.
    std::tuple<uint32_t const &, uint32_t const &, ImageAspectFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( subpass, inputAttachmentIndex, aspectMask );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted member-wise comparison.
    auto operator<=>( InputAttachmentAspectReference const & ) const = default;
#else
    bool operator==( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( subpass == rhs.subpass ) && ( inputAttachmentIndex == rhs.inputAttachmentIndex ) && ( aspectMask == rhs.aspectMask );
#  endif
    }

    bool operator!=( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkInputAttachmentAspectReference in declaration order; do not reorder (ABI contract).
    uint32_t         subpass              = {};
    uint32_t         inputAttachmentIndex = {};
    ImageAspectFlags aspectMask           = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (used by generic code when C++20 is available).
  template <>
  struct CppType<VkInputAttachmentAspectReference>
  {
    using Type = InputAttachmentAspectReference;
  };
#endif
  using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference;

  // wrapper struct for struct VkInstanceCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkInstanceCreateInfo.html
  struct InstanceCreateInfo
  {
    using NativeType = VkInstanceCreateInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eInstanceCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR InstanceCreateInfo( InstanceCreateFlags     flags_                   = {},
                                             const ApplicationInfo * pApplicationInfo_        = {},
                                             uint32_t                enabledLayerCount_       = {},
                                             const char * const *    ppEnabledLayerNames_     = {},
                                             uint32_t                enabledExtensionCount_   = {},
                                             const char * const *    ppEnabledExtensionNames_ = {},
                                             const void *            pNext_                   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , pApplicationInfo{ pApplicationInfo_ }
      , enabledLayerCount{ enabledLayerCount_ }
      , ppEnabledLayerNames{ ppEnabledLayerNames_ }
      , enabledExtensionCount{ enabledExtensionCount_ }
      , ppEnabledExtensionNames{ ppEnabledExtensionNames_ }
    {
    }

    VULKAN_HPP_CONSTEXPR InstanceCreateInfo( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : InstanceCreateInfo( *reinterpret_cast<InstanceCreateInfo const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    InstanceCreateInfo( InstanceCreateFlags                                 flags_,
                        const ApplicationInfo *                             pApplicationInfo_,
                        ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_,
                        ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ = {},
                        const void *                                        pNext_                  = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , pApplicationInfo( pApplicationInfo_ )
      , enabledLayerCount( static_cast<uint32_t>( pEnabledLayerNames_.size() ) )
      , ppEnabledLayerNames( pEnabledLayerNames_.data() )
      , enabledExtensionCount( static_cast<uint32_t>( pEnabledExtensionNames_.size() ) )
      , ppEnabledExtensionNames( pEnabledExtensionNames_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    InstanceCreateInfo & operator=( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    InstanceCreateInfo & operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<InstanceCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    // Fluent setters: each assigns one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setFlags( InstanceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPApplicationInfo( const ApplicationInfo * pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pApplicationInfo = pApplicationInfo_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledLayerCount = enabledLayerCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
    {
      ppEnabledLayerNames = ppEnabledLayerNames_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience setter: fills both enabledLayerCount and ppEnabledLayerNames from one array proxy.
    // NOTE: only the pointer is stored; the caller must keep the backing array alive.
    InstanceCreateInfo & setPEnabledLayerNames( ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledLayerCount   = static_cast<uint32_t>( pEnabledLayerNames_.size() );
      ppEnabledLayerNames = pEnabledLayerNames_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledExtensionCount = enabledExtensionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
    {
      ppEnabledExtensionNames = ppEnabledExtensionNames_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience setter: fills both enabledExtensionCount and ppEnabledExtensionNames from one array proxy.
    // NOTE: only the pointer is stored; the caller must keep the backing array alive.
    InstanceCreateInfo & setPEnabledExtensionNames( ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledExtensionCount   = static_cast<uint32_t>( pEnabledExtensionNames_.size() );
      ppEnabledExtensionNames = pEnabledExtensionNames_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the C type, by reference and by pointer; valid only because the
    // wrapper is layout-compatible with VkInstanceCreateInfo (same members, same order).
    operator VkInstanceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkInstanceCreateInfo *>( this );
    }

    operator VkInstanceCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkInstanceCreateInfo *>( this );
    }

    operator VkInstanceCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkInstanceCreateInfo *>( this );
    }

    operator VkInstanceCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkInstanceCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               InstanceCreateFlags const &,
               const ApplicationInfo * const &,
               uint32_t const &,
               const char * const * const &,
               uint32_t const &,
               const char * const * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, pApplicationInfo, enabledLayerCount, ppEnabledLayerNames, enabledExtensionCount, ppEnabledExtensionNames );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Lexicographic ordering over all members. Layer and extension names are compared by string
    // content (strcmp), not by pointer value; pNext and pApplicationInfo compare by address only.
    std::strong_ordering operator<=>( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
        return cmp;
      if ( auto cmp = pApplicationInfo <=> rhs.pApplicationInfo; cmp != 0 )
        return cmp;
      if ( auto cmp = enabledLayerCount <=> rhs.enabledLayerCount; cmp != 0 )
        return cmp;
      // Counts are equal here, so both arrays are iterated over the same range.
      // NOTE(review): assumes every entry is a valid, non-null C string — confirm against callers.
      for ( size_t i = 0; i < enabledLayerCount; ++i )
      {
        if ( ppEnabledLayerNames[i] != rhs.ppEnabledLayerNames[i] )
          if ( auto cmp = strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ); cmp != 0 )
            return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater;
      }
      if ( auto cmp = enabledExtensionCount <=> rhs.enabledExtensionCount; cmp != 0 )
        return cmp;
      for ( size_t i = 0; i < enabledExtensionCount; ++i )
      {
        if ( ppEnabledExtensionNames[i] != rhs.ppEnabledExtensionNames[i] )
          if ( auto cmp = strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ); cmp != 0 )
            return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater;
      }

      return std::strong_ordering::equivalent;
    }
#endif

    // Deep equality: pNext and pApplicationInfo compare by address, while the layer and extension
    // name arrays compare element-wise by string content. The `left == right` fast path also makes
    // two matching null entries compare equal; one-sided null entries would reach strcmp.
    // NOTE(review): assumes name entries are non-null when the counts are non-zero — confirm.
    bool operator==( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pApplicationInfo == rhs.pApplicationInfo ) &&
             ( enabledLayerCount == rhs.enabledLayerCount ) &&
             std::equal( ppEnabledLayerNames,
                         ppEnabledLayerNames + enabledLayerCount,
                         rhs.ppEnabledLayerNames,
                         []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } ) &&
             ( enabledExtensionCount == rhs.enabledExtensionCount ) &&
             std::equal( ppEnabledExtensionNames,
                         ppEnabledExtensionNames + enabledExtensionCount,
                         rhs.ppEnabledExtensionNames,
                         []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } );
    }

    // Inequality is simply the negation of the deep equality comparison.
    bool operator!=( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }

  public:
    // Mirrors VkInstanceCreateInfo member-for-member; order and types must match the C struct
    // exactly, since the conversion operators reinterpret_cast between the two.
    StructureType           sType                   = StructureType::eInstanceCreateInfo;
    const void *            pNext                   = {};
    InstanceCreateFlags     flags                   = {};
    const ApplicationInfo * pApplicationInfo        = {};
    uint32_t                enabledLayerCount       = {};
    const char * const *    ppEnabledLayerNames     = {};
    uint32_t                enabledExtensionCount   = {};
    const char * const *    ppEnabledExtensionNames = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkInstanceCreateInfo to its C++ wrapper type.
  template <>
  struct CppType<VkInstanceCreateInfo>
  {
    using Type = InstanceCreateInfo;
  };
#endif

  // Maps StructureType::eInstanceCreateInfo to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eInstanceCreateInfo>
  {
    using Type = InstanceCreateInfo;
  };

  // wrapper struct for struct VkLatencySleepInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLatencySleepInfoNV.html
  struct LatencySleepInfoNV
  {
    using NativeType = VkLatencySleepInfoNV;

    // allowDuplicate: whether this struct may occur more than once in a structure chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eLatencySleepInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR LatencySleepInfoNV( Semaphore signalSemaphore_ = {}, uint64_t value_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , signalSemaphore{ signalSemaphore_ }
      , value{ value_ }
    {
    }

    VULKAN_HPP_CONSTEXPR LatencySleepInfoNV( LatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on the bit-identical layout of the wrapper.
    LatencySleepInfoNV( VkLatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : LatencySleepInfoNV( *reinterpret_cast<LatencySleepInfoNV const *>( &rhs ) ) {}

    LatencySleepInfoNV & operator=( LatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; same layout-compatibility requirement as the converting constructor.
    LatencySleepInfoNV & operator=( VkLatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<LatencySleepInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 LatencySleepInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LatencySleepInfoNV & setSignalSemaphore( Semaphore signalSemaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      signalSemaphore = signalSemaphore_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LatencySleepInfoNV & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
    {
      value = value_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the C type, by reference and by pointer.
    operator VkLatencySleepInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkLatencySleepInfoNV *>( this );
    }

    operator VkLatencySleepInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkLatencySleepInfoNV *>( this );
    }

    operator VkLatencySleepInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkLatencySleepInfoNV *>( this );
    }

    operator VkLatencySleepInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkLatencySleepInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, Semaphore const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, signalSemaphore, value );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( LatencySleepInfoNV const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( LatencySleepInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( signalSemaphore == rhs.signalSemaphore ) && ( value == rhs.value );
#  endif
    }

    bool operator!=( LatencySleepInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Mirrors VkLatencySleepInfoNV member-for-member; do not reorder.
    StructureType sType           = StructureType::eLatencySleepInfoNV;
    const void *  pNext           = {};
    Semaphore     signalSemaphore = {};
    uint64_t      value           = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkLatencySleepInfoNV to its C++ wrapper type.
  template <>
  struct CppType<VkLatencySleepInfoNV>
  {
    using Type = LatencySleepInfoNV;
  };
#endif

  // Maps StructureType::eLatencySleepInfoNV to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eLatencySleepInfoNV>
  {
    using Type = LatencySleepInfoNV;
  };

  // wrapper struct for struct VkLatencySleepModeInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLatencySleepModeInfoNV.html
  struct LatencySleepModeInfoNV
  {
    using NativeType = VkLatencySleepModeInfoNV;

    // allowDuplicate: whether this struct may occur more than once in a structure chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eLatencySleepModeInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR LatencySleepModeInfoNV( Bool32       lowLatencyMode_    = {},
                                                 Bool32       lowLatencyBoost_   = {},
                                                 uint32_t     minimumIntervalUs_ = {},
                                                 const void * pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , lowLatencyMode{ lowLatencyMode_ }
      , lowLatencyBoost{ lowLatencyBoost_ }
      , minimumIntervalUs{ minimumIntervalUs_ }
    {
    }

    VULKAN_HPP_CONSTEXPR LatencySleepModeInfoNV( LatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on the bit-identical layout of the wrapper.
    LatencySleepModeInfoNV( VkLatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : LatencySleepModeInfoNV( *reinterpret_cast<LatencySleepModeInfoNV const *>( &rhs ) )
    {
    }

    LatencySleepModeInfoNV & operator=( LatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; same layout-compatibility requirement as the converting constructor.
    LatencySleepModeInfoNV & operator=( VkLatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<LatencySleepModeInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setLowLatencyMode( Bool32 lowLatencyMode_ ) VULKAN_HPP_NOEXCEPT
    {
      lowLatencyMode = lowLatencyMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setLowLatencyBoost( Bool32 lowLatencyBoost_ ) VULKAN_HPP_NOEXCEPT
    {
      lowLatencyBoost = lowLatencyBoost_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setMinimumIntervalUs( uint32_t minimumIntervalUs_ ) VULKAN_HPP_NOEXCEPT
    {
      minimumIntervalUs = minimumIntervalUs_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the C type, by reference and by pointer.
    operator VkLatencySleepModeInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkLatencySleepModeInfoNV *>( this );
    }

    operator VkLatencySleepModeInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkLatencySleepModeInfoNV *>( this );
    }

    operator VkLatencySleepModeInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkLatencySleepModeInfoNV *>( this );
    }

    operator VkLatencySleepModeInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkLatencySleepModeInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, Bool32 const &, Bool32 const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, lowLatencyMode, lowLatencyBoost, minimumIntervalUs );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( LatencySleepModeInfoNV const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( LatencySleepModeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( lowLatencyMode == rhs.lowLatencyMode ) && ( lowLatencyBoost == rhs.lowLatencyBoost ) &&
             ( minimumIntervalUs == rhs.minimumIntervalUs );
#  endif
    }

    bool operator!=( LatencySleepModeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Mirrors VkLatencySleepModeInfoNV member-for-member; do not reorder.
    StructureType sType             = StructureType::eLatencySleepModeInfoNV;
    const void *  pNext             = {};
    Bool32        lowLatencyMode    = {};
    Bool32        lowLatencyBoost   = {};
    uint32_t      minimumIntervalUs = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkLatencySleepModeInfoNV to its C++ wrapper type.
  template <>
  struct CppType<VkLatencySleepModeInfoNV>
  {
    using Type = LatencySleepModeInfoNV;
  };
#endif

  // Maps StructureType::eLatencySleepModeInfoNV to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eLatencySleepModeInfoNV>
  {
    using Type = LatencySleepModeInfoNV;
  };

  // wrapper struct for struct VkLatencySubmissionPresentIdNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLatencySubmissionPresentIdNV.html
  struct LatencySubmissionPresentIdNV
  {
    using NativeType = VkLatencySubmissionPresentIdNV;

    // allowDuplicate: whether this struct may occur more than once in a structure chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eLatencySubmissionPresentIdNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR LatencySubmissionPresentIdNV( uint64_t presentID_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , presentID{ presentID_ }
    {
    }

    VULKAN_HPP_CONSTEXPR LatencySubmissionPresentIdNV( LatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on the bit-identical layout of the wrapper.
    LatencySubmissionPresentIdNV( VkLatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : LatencySubmissionPresentIdNV( *reinterpret_cast<LatencySubmissionPresentIdNV const *>( &rhs ) )
    {
    }

    LatencySubmissionPresentIdNV & operator=( LatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; same layout-compatibility requirement as the converting constructor.
    LatencySubmissionPresentIdNV & operator=( VkLatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<LatencySubmissionPresentIdNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 LatencySubmissionPresentIdNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LatencySubmissionPresentIdNV & setPresentID( uint64_t presentID_ ) VULKAN_HPP_NOEXCEPT
    {
      presentID = presentID_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the C type, by reference and by pointer.
    operator VkLatencySubmissionPresentIdNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkLatencySubmissionPresentIdNV *>( this );
    }

    operator VkLatencySubmissionPresentIdNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkLatencySubmissionPresentIdNV *>( this );
    }

    operator VkLatencySubmissionPresentIdNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkLatencySubmissionPresentIdNV *>( this );
    }

    operator VkLatencySubmissionPresentIdNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkLatencySubmissionPresentIdNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, presentID );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( LatencySubmissionPresentIdNV const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( LatencySubmissionPresentIdNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentID == rhs.presentID );
#  endif
    }

    bool operator!=( LatencySubmissionPresentIdNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Mirrors VkLatencySubmissionPresentIdNV member-for-member; do not reorder.
    StructureType sType     = StructureType::eLatencySubmissionPresentIdNV;
    const void *  pNext     = {};
    uint64_t      presentID = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkLatencySubmissionPresentIdNV to its C++ wrapper type.
  template <>
  struct CppType<VkLatencySubmissionPresentIdNV>
  {
    using Type = LatencySubmissionPresentIdNV;
  };
#endif

  // Maps StructureType::eLatencySubmissionPresentIdNV to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eLatencySubmissionPresentIdNV>
  {
    using Type = LatencySubmissionPresentIdNV;
  };

  // wrapper struct for struct VkLatencySurfaceCapabilitiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLatencySurfaceCapabilitiesNV.html
  struct LatencySurfaceCapabilitiesNV
  {
    using NativeType = VkLatencySurfaceCapabilitiesNV;

    // allowDuplicate: whether this struct may occur more than once in a structure chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eLatencySurfaceCapabilitiesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      LatencySurfaceCapabilitiesNV( uint32_t presentModeCount_ = {}, PresentModeKHR * pPresentModes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , presentModeCount{ presentModeCount_ }
      , pPresentModes{ pPresentModes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR LatencySurfaceCapabilitiesNV( LatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on the bit-identical layout of the wrapper.
    LatencySurfaceCapabilitiesNV( VkLatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : LatencySurfaceCapabilitiesNV( *reinterpret_cast<LatencySurfaceCapabilitiesNV const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives presentModeCount and pPresentModes from one array proxy.
    // NOTE: only the pointer is stored; the caller must keep the backing array alive.
    LatencySurfaceCapabilitiesNV( ArrayProxyNoTemporaries<PresentModeKHR> const & presentModes_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), presentModeCount( static_cast<uint32_t>( presentModes_.size() ) ), pPresentModes( presentModes_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    LatencySurfaceCapabilitiesNV & operator=( LatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; same layout-compatibility requirement as the converting constructor.
    LatencySurfaceCapabilitiesNV & operator=( VkLatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<LatencySurfaceCapabilitiesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 LatencySurfaceCapabilitiesNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LatencySurfaceCapabilitiesNV & setPresentModeCount( uint32_t presentModeCount_ ) VULKAN_HPP_NOEXCEPT
    {
      presentModeCount = presentModeCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LatencySurfaceCapabilitiesNV & setPPresentModes( PresentModeKHR * pPresentModes_ ) VULKAN_HPP_NOEXCEPT
    {
      pPresentModes = pPresentModes_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience setter: fills both presentModeCount and pPresentModes from one array proxy.
    LatencySurfaceCapabilitiesNV & setPresentModes( ArrayProxyNoTemporaries<PresentModeKHR> const & presentModes_ ) VULKAN_HPP_NOEXCEPT
    {
      presentModeCount = static_cast<uint32_t>( presentModes_.size() );
      pPresentModes    = presentModes_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the C type, by reference and by pointer.
    operator VkLatencySurfaceCapabilitiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkLatencySurfaceCapabilitiesNV *>( this );
    }

    operator VkLatencySurfaceCapabilitiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkLatencySurfaceCapabilitiesNV *>( this );
    }

    operator VkLatencySurfaceCapabilitiesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkLatencySurfaceCapabilitiesNV *>( this );
    }

    operator VkLatencySurfaceCapabilitiesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkLatencySurfaceCapabilitiesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, PresentModeKHR * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, presentModeCount, pPresentModes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( LatencySurfaceCapabilitiesNV const & ) const = default;
#else
    // Member-wise equality; pPresentModes is compared as a raw pointer, not element-wise.
    bool operator==( LatencySurfaceCapabilitiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentModeCount == rhs.presentModeCount ) && ( pPresentModes == rhs.pPresentModes );
#  endif
    }

    bool operator!=( LatencySurfaceCapabilitiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Mirrors VkLatencySurfaceCapabilitiesNV member-for-member; do not reorder.
    StructureType    sType            = StructureType::eLatencySurfaceCapabilitiesNV;
    const void *     pNext            = {};
    uint32_t         presentModeCount = {};
    PresentModeKHR * pPresentModes    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkLatencySurfaceCapabilitiesNV to its C++ wrapper type.
  template <>
  struct CppType<VkLatencySurfaceCapabilitiesNV>
  {
    using Type = LatencySurfaceCapabilitiesNV;
  };
#endif

  // Maps StructureType::eLatencySurfaceCapabilitiesNV to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eLatencySurfaceCapabilitiesNV>
  {
    using Type = LatencySurfaceCapabilitiesNV;
  };

  // wrapper struct for struct VkLayerProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLayerProperties.html
  struct LayerProperties
  {
    using NativeType = VkLayerProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 LayerProperties( std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & layerName_             = {},
                                             uint32_t                                             specVersion_           = {},
                                             uint32_t                                             implementationVersion_ = {},
                                             std::array<char, VK_MAX_DESCRIPTION_SIZE> const &    description_           = {} ) VULKAN_HPP_NOEXCEPT
      : layerName{ layerName_ }
      , specVersion{ specVersion_ }
      , implementationVersion{ implementationVersion_ }
      , description{ description_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 LayerProperties( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on the bit-identical layout of the wrapper.
    LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT : LayerProperties( *reinterpret_cast<LayerProperties const *>( &rhs ) ) {}

    LayerProperties & operator=( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; same layout-compatibility requirement as the converting constructor.
    LayerProperties & operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<LayerProperties const *>( &rhs );
      return *this;
    }

    // Zero-cost conversions to the C type, by reference and by pointer.
    operator VkLayerProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkLayerProperties *>( this );
    }

    operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkLayerProperties *>( this );
    }

    operator VkLayerProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkLayerProperties *>( this );
    }

    operator VkLayerProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkLayerProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::
      tuple<ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &, uint32_t const &, uint32_t const &, ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( layerName, specVersion, implementationVersion, description );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Lexicographic ordering; the fixed-size char arrays are compared by string content (strcmp),
    // which assumes they are null-terminated within their bounds.
    std::strong_ordering operator<=>( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = strcmp( layerName, rhs.layerName ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = specVersion <=> rhs.specVersion; cmp != 0 )
        return cmp;
      if ( auto cmp = implementationVersion <=> rhs.implementationVersion; cmp != 0 )
        return cmp;
      if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#endif

    // Equality uses string content for the char arrays, consistent with operator<=> above.
    bool operator==( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( strcmp( layerName, rhs.layerName ) == 0 ) && ( specVersion == rhs.specVersion ) && ( implementationVersion == rhs.implementationVersion ) &&
             ( strcmp( description, rhs.description ) == 0 );
    }

    bool operator!=( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Mirrors VkLayerProperties member-for-member; do not reorder.
    ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layerName             = {};
    uint32_t                                         specVersion           = {};
    uint32_t                                         implementationVersion = {};
    ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE>    description           = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkLayerProperties to its C++ wrapper type.
  template <>
  struct CppType<VkLayerProperties>
  {
    using Type = LayerProperties;
  };
#endif

  // wrapper struct for struct VkLayerSettingEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLayerSettingEXT.html
  struct LayerSettingEXT
  {
    using NativeType = VkLayerSettingEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Raw constructor: stores the given pointers and count verbatim.
    // pValues presumably points to valueCount elements of the type selected by type_ — see the
    // Vulkan spec for VkLayerSettingEXT; no validation is performed here.
    VULKAN_HPP_CONSTEXPR LayerSettingEXT( const char *        pLayerName_   = {},
                                          const char *        pSettingName_ = {},
                                          LayerSettingTypeEXT type_         = LayerSettingTypeEXT::eBool32,
                                          uint32_t            valueCount_   = {},
                                          const void *        pValues_      = {} ) VULKAN_HPP_NOEXCEPT
      : pLayerName{ pLayerName_ }
      , pSettingName{ pSettingName_ }
      , type{ type_ }
      , valueCount{ valueCount_ }
      , pValues{ pValues_ }
    {
    }

    VULKAN_HPP_CONSTEXPR LayerSettingEXT( LayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on the bit-identical layout of the wrapper.
    LayerSettingEXT( VkLayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT : LayerSettingEXT( *reinterpret_cast<LayerSettingEXT const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Typed convenience constructors: each stores the proxy's element count and data pointer and
    // asserts (debug builds only) that type_ is consistent with the element type via isSameType.
    // Only the pointer is stored; the caller must keep the backing array alive.
    // NOTE: you need to provide the type because Bool32 and uint32_t are indistinguishable!
    LayerSettingEXT( char const * pLayerName_, char const * pSettingName_, LayerSettingTypeEXT type_, ArrayProxyNoTemporaries<const int32_t> const & values_ )
      : pLayerName( pLayerName_ )
      , pSettingName( pSettingName_ )
      , type( type_ )
      , valueCount( static_cast<uint32_t>( values_.size() ) )
      , pValues( values_.data() )
    {
      VULKAN_HPP_ASSERT( isSameType<int32_t>( type ) );
    }

    LayerSettingEXT( char const * pLayerName_, char const * pSettingName_, LayerSettingTypeEXT type_, ArrayProxyNoTemporaries<const int64_t> const & values_ )
      : pLayerName( pLayerName_ )
      , pSettingName( pSettingName_ )
      , type( type_ )
      , valueCount( static_cast<uint32_t>( values_.size() ) )
      , pValues( values_.data() )
    {
      VULKAN_HPP_ASSERT( isSameType<int64_t>( type ) );
    }

    LayerSettingEXT( char const * pLayerName_, char const * pSettingName_, LayerSettingTypeEXT type_, ArrayProxyNoTemporaries<const uint32_t> const & values_ )
      : pLayerName( pLayerName_ )
      , pSettingName( pSettingName_ )
      , type( type_ )
      , valueCount( static_cast<uint32_t>( values_.size() ) )
      , pValues( values_.data() )
    {
      VULKAN_HPP_ASSERT( isSameType<uint32_t>( type ) );
    }

    LayerSettingEXT( char const * pLayerName_, char const * pSettingName_, LayerSettingTypeEXT type_, ArrayProxyNoTemporaries<const uint64_t> const & values_ )
      : pLayerName( pLayerName_ )
      , pSettingName( pSettingName_ )
      , type( type_ )
      , valueCount( static_cast<uint32_t>( values_.size() ) )
      , pValues( values_.data() )
    {
      VULKAN_HPP_ASSERT( isSameType<uint64_t>( type ) );
    }

    LayerSettingEXT( char const * pLayerName_, char const * pSettingName_, LayerSettingTypeEXT type_, ArrayProxyNoTemporaries<const float> const & values_ )
      : pLayerName( pLayerName_ )
      , pSettingName( pSettingName_ )
      , type( type_ )
      , valueCount( static_cast<uint32_t>( values_.size() ) )
      , pValues( values_.data() )
    {
      VULKAN_HPP_ASSERT( isSameType<float>( type ) );
    }

    LayerSettingEXT( char const * pLayerName_, char const * pSettingName_, LayerSettingTypeEXT type_, ArrayProxyNoTemporaries<const double> const & values_ )
      : pLayerName( pLayerName_ )
      , pSettingName( pSettingName_ )
      , type( type_ )
      , valueCount( static_cast<uint32_t>( values_.size() ) )
      , pValues( values_.data() )
    {
      VULKAN_HPP_ASSERT( isSameType<double>( type ) );
    }

    // NOTE(review): this overload takes char const * elements but asserts isSameType<char *> —
    // presumably intentional in the generator; confirm before relying on the assertion.
    LayerSettingEXT( char const * pLayerName_, char const * pSettingName_, LayerSettingTypeEXT type_, ArrayProxyNoTemporaries<const char *> const & values_ )
      : pLayerName( pLayerName_ )
      , pSettingName( pSettingName_ )
      , type( type_ )
      , valueCount( static_cast<uint32_t>( values_.size() ) )
      , pValues( values_.data() )
    {
      VULKAN_HPP_ASSERT( isSameType<char *>( type ) );
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    LayerSettingEXT & operator=( LayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; relies on the bit-identical layout of the wrapper.
    LayerSettingEXT & operator=( VkLayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<LayerSettingEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent per-member setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setPLayerName( const char * pLayerName_ ) VULKAN_HPP_NOEXCEPT
    {
      pLayerName = pLayerName_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setPSettingName( const char * pSettingName_ ) VULKAN_HPP_NOEXCEPT
    {
      pSettingName = pSettingName_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setType( LayerSettingTypeEXT type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setValueCount( uint32_t valueCount_ ) VULKAN_HPP_NOEXCEPT
    {
      valueCount = valueCount_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setters: set valueCount and pValues together from an array proxy,
    // one overload per supported element type. Note: unlike the constructors, these do
    // not assert that the element type matches the current 'type' member.
    LayerSettingEXT & setValues( ArrayProxyNoTemporaries<const int32_t> const & values_ ) VULKAN_HPP_NOEXCEPT
    {
      valueCount = static_cast<uint32_t>( values_.size() );
      pValues    = values_.data();
      return *this;
    }

    LayerSettingEXT & setValues( ArrayProxyNoTemporaries<const int64_t> const & values_ ) VULKAN_HPP_NOEXCEPT
    {
      valueCount = static_cast<uint32_t>( values_.size() );
      pValues    = values_.data();
      return *this;
    }

    LayerSettingEXT & setValues( ArrayProxyNoTemporaries<const uint32_t> const & values_ ) VULKAN_HPP_NOEXCEPT
    {
      valueCount = static_cast<uint32_t>( values_.size() );
      pValues    = values_.data();
      return *this;
    }

    LayerSettingEXT & setValues( ArrayProxyNoTemporaries<const uint64_t> const & values_ ) VULKAN_HPP_NOEXCEPT
    {
      valueCount = static_cast<uint32_t>( values_.size() );
      pValues    = values_.data();
      return *this;
    }

    LayerSettingEXT & setValues( ArrayProxyNoTemporaries<const float> const & values_ ) VULKAN_HPP_NOEXCEPT
    {
      valueCount = static_cast<uint32_t>( values_.size() );
      pValues    = values_.data();
      return *this;
    }

    LayerSettingEXT & setValues( ArrayProxyNoTemporaries<const double> const & values_ ) VULKAN_HPP_NOEXCEPT
    {
      valueCount = static_cast<uint32_t>( values_.size() );
      pValues    = values_.data();
      return *this;
    }

    LayerSettingEXT & setValues( ArrayProxyNoTemporaries<const char *> const & values_ ) VULKAN_HPP_NOEXCEPT
    {
      valueCount = static_cast<uint32_t>( values_.size() );
      pValues    = values_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type (reference and pointer, const and non-const);
    // valid because the wrapper is layout-compatible with VkLayerSettingEXT.
    operator VkLayerSettingEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkLayerSettingEXT *>( this );
    }

    operator VkLayerSettingEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkLayerSettingEXT *>( this );
    }

    operator VkLayerSettingEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkLayerSettingEXT *>( this );
    }

    operator VkLayerSettingEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkLayerSettingEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references, for generic tooling.
    std::tuple<const char * const &, const char * const &, LayerSettingTypeEXT const &, uint32_t const &, const void * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( pLayerName, pSettingName, type, valueCount, pValues );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Orders by layer name, setting name (both by string content), then type,
    // valueCount and the pValues pointer.
    // Fix: pLayerName / pSettingName default to nullptr, and strcmp with a null
    // argument is undefined behavior — null names are now ordered before non-null
    // names instead of being passed to strcmp.
    std::strong_ordering operator<=>( LayerSettingEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( pLayerName != rhs.pLayerName )
      {
        if ( !pLayerName || !rhs.pLayerName )
          return !pLayerName ? std::strong_ordering::less : std::strong_ordering::greater;
        if ( auto cmp = strcmp( pLayerName, rhs.pLayerName ); cmp != 0 )
          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      }
      if ( pSettingName != rhs.pSettingName )
      {
        if ( !pSettingName || !rhs.pSettingName )
          return !pSettingName ? std::strong_ordering::less : std::strong_ordering::greater;
        if ( auto cmp = strcmp( pSettingName, rhs.pSettingName ); cmp != 0 )
          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      }
      if ( auto cmp = type <=> rhs.type; cmp != 0 )
        return cmp;
      if ( auto cmp = valueCount <=> rhs.valueCount; cmp != 0 )
        return cmp;
      if ( auto cmp = pValues <=> rhs.pValues; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    // Equality compares the name strings by content and the remaining members by value.
    // Fix: strcmp is only reached when both pointers are non-null and distinct;
    // previously a null/non-null name pair invoked strcmp( nullptr, ... ), which is
    // undefined behavior (both name pointers default to nullptr).
    bool operator==( LayerSettingEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( ( pLayerName == rhs.pLayerName ) || ( pLayerName && rhs.pLayerName && ( strcmp( pLayerName, rhs.pLayerName ) == 0 ) ) ) &&
             ( ( pSettingName == rhs.pSettingName ) || ( pSettingName && rhs.pSettingName && ( strcmp( pSettingName, rhs.pSettingName ) == 0 ) ) ) &&
             ( type == rhs.type ) && ( valueCount == rhs.valueCount ) && ( pValues == rhs.pValues );
    }

    // Inequality is defined as the logical negation of equality.
    bool operator!=( LayerSettingEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }

  public:
    const char *        pLayerName   = {};
    const char *        pSettingName = {};
    LayerSettingTypeEXT type         = LayerSettingTypeEXT::eBool32;
    uint32_t            valueCount   = {};
    const void *        pValues      = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkLayerSettingEXT to its C++ wrapper via the CppType trait (C++20 only).
  template <>
  struct CppType<VkLayerSettingEXT>
  {
    using Type = LayerSettingEXT;
  };
#endif

  // wrapper struct for struct VkLayerSettingsCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLayerSettingsCreateInfoEXT.html
  struct LayerSettingsCreateInfoEXT
  {
    using NativeType = VkLayerSettingsCreateInfoEXT;

    // May appear multiple times in a pNext chain.
    static const bool                                  allowDuplicate = true;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eLayerSettingsCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      LayerSettingsCreateInfoEXT( uint32_t settingCount_ = {}, const LayerSettingEXT * pSettings_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , settingCount{ settingCount_ }
      , pSettings{ pSettings_ }
    {
    }

    VULKAN_HPP_CONSTEXPR LayerSettingsCreateInfoEXT( LayerSettingsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the C struct relies on the layout compatibility of the two types.
    LayerSettingsCreateInfoEXT( VkLayerSettingsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : LayerSettingsCreateInfoEXT( *reinterpret_cast<LayerSettingsCreateInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives settingCount and pSettings from an array proxy.
    LayerSettingsCreateInfoEXT( ArrayProxyNoTemporaries<const LayerSettingEXT> const & settings_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), settingCount( static_cast<uint32_t>( settings_.size() ) ), pSettings( settings_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    LayerSettingsCreateInfoEXT & operator=( LayerSettingsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    LayerSettingsCreateInfoEXT & operator=( VkLayerSettingsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<LayerSettingsCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent per-member setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 LayerSettingsCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LayerSettingsCreateInfoEXT & setSettingCount( uint32_t settingCount_ ) VULKAN_HPP_NOEXCEPT
    {
      settingCount = settingCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LayerSettingsCreateInfoEXT & setPSettings( const LayerSettingEXT * pSettings_ ) VULKAN_HPP_NOEXCEPT
    {
      pSettings = pSettings_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets settingCount and pSettings together from an array proxy.
    LayerSettingsCreateInfoEXT & setSettings( ArrayProxyNoTemporaries<const LayerSettingEXT> const & settings_ ) VULKAN_HPP_NOEXCEPT
    {
      settingCount = static_cast<uint32_t>( settings_.size() );
      pSettings    = settings_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type (reference and pointer, const and non-const).
    operator VkLayerSettingsCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkLayerSettingsCreateInfoEXT *>( this );
    }

    operator VkLayerSettingsCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkLayerSettingsCreateInfoEXT *>( this );
    }

    operator VkLayerSettingsCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkLayerSettingsCreateInfoEXT *>( this );
    }

    operator VkLayerSettingsCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkLayerSettingsCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references, for generic tooling.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const LayerSettingEXT * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, settingCount, pSettings );
    }
#endif

    // Comparison is member-wise; note that pSettings is compared as a pointer, not by content.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( LayerSettingsCreateInfoEXT const & ) const = default;
#else
    bool operator==( LayerSettingsCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( settingCount == rhs.settingCount ) && ( pSettings == rhs.pSettings );
#  endif
    }

    bool operator!=( LayerSettingsCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType           sType        = StructureType::eLayerSettingsCreateInfoEXT;
    const void *            pNext        = {};
    uint32_t                settingCount = {};
    const LayerSettingEXT * pSettings    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkLayerSettingsCreateInfoEXT to its C++ wrapper (C++20 only).
  template <>
  struct CppType<VkLayerSettingsCreateInfoEXT>
  {
    using Type = LayerSettingsCreateInfoEXT;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct type.
  template <>
  struct CppType<StructureType, StructureType::eLayerSettingsCreateInfoEXT>
  {
    using Type = LayerSettingsCreateInfoEXT;
  };

#if defined( VK_USE_PLATFORM_MACOS_MVK )
  // wrapper struct for struct VkMacOSSurfaceCreateInfoMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMacOSSurfaceCreateInfoMVK.html
  struct MacOSSurfaceCreateInfoMVK
  {
    using NativeType = VkMacOSSurfaceCreateInfoMVK;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMacosSurfaceCreateInfoMVK;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateFlagsMVK flags_ = {}, const void * pView_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , pView{ pView_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the C struct relies on the layout compatibility of the two types.
    MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
      : MacOSSurfaceCreateInfoMVK( *reinterpret_cast<MacOSSurfaceCreateInfoMVK const *>( &rhs ) )
    {
    }

    MacOSSurfaceCreateInfoMVK & operator=( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    MacOSSurfaceCreateInfoMVK & operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MacOSSurfaceCreateInfoMVK const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent per-member setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setFlags( MacOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT
    {
      pView = pView_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type (reference and pointer, const and non-const).
    operator VkMacOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( this );
    }

    operator VkMacOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMacOSSurfaceCreateInfoMVK *>( this );
    }

    operator VkMacOSSurfaceCreateInfoMVK const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( this );
    }

    operator VkMacOSSurfaceCreateInfoMVK *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMacOSSurfaceCreateInfoMVK *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references, for generic tooling.
    std::tuple<StructureType const &, const void * const &, MacOSSurfaceCreateFlagsMVK const &, const void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, pView );
    }
#  endif

    // Comparison is member-wise; pNext/pView are compared as pointers.
#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MacOSSurfaceCreateInfoMVK const & ) const = default;
#  else
    bool operator==( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pView == rhs.pView );
#    endif
    }

    bool operator!=( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    StructureType              sType = StructureType::eMacosSurfaceCreateInfoMVK;
    const void *               pNext = {};
    MacOSSurfaceCreateFlagsMVK flags = {};
    const void *               pView = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkMacOSSurfaceCreateInfoMVK to its C++ wrapper (C++20 only).
  template <>
  struct CppType<VkMacOSSurfaceCreateInfoMVK>
  {
    using Type = MacOSSurfaceCreateInfoMVK;
  };
#  endif

  // Maps the StructureType enumerant back to the wrapper struct type.
  template <>
  struct CppType<StructureType, StructureType::eMacosSurfaceCreateInfoMVK>
  {
    using Type = MacOSSurfaceCreateInfoMVK;
  };
#endif /*VK_USE_PLATFORM_MACOS_MVK*/

  // wrapper struct for struct VkMappedMemoryRange, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMappedMemoryRange.html
  struct MappedMemoryRange
  {
    using NativeType = VkMappedMemoryRange;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMappedMemoryRange;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      MappedMemoryRange( DeviceMemory memory_ = {}, DeviceSize offset_ = {}, DeviceSize size_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , memory{ memory_ }
      , offset{ offset_ }
      , size{ size_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MappedMemoryRange( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the C struct relies on the layout compatibility of the two types.
    MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT : MappedMemoryRange( *reinterpret_cast<MappedMemoryRange const *>( &rhs ) ) {}

    MappedMemoryRange & operator=( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    MappedMemoryRange & operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MappedMemoryRange const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent per-member setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setMemory( DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setOffset( DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setSize( DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type (reference and pointer, const and non-const).
    operator VkMappedMemoryRange const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMappedMemoryRange *>( this );
    }

    operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMappedMemoryRange *>( this );
    }

    operator VkMappedMemoryRange const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMappedMemoryRange *>( this );
    }

    operator VkMappedMemoryRange *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMappedMemoryRange *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references, for generic tooling.
    std::tuple<StructureType const &, const void * const &, DeviceMemory const &, DeviceSize const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memory, offset, size );
    }
#endif

    // Comparison is member-wise.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MappedMemoryRange const & ) const = default;
#else
    bool operator==( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( offset == rhs.offset ) && ( size == rhs.size );
#  endif
    }

    bool operator!=( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType  = StructureType::eMappedMemoryRange;
    const void *  pNext  = {};
    DeviceMemory  memory = {};
    DeviceSize    offset = {};
    DeviceSize    size   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkMappedMemoryRange to its C++ wrapper (C++20 only).
  template <>
  struct CppType<VkMappedMemoryRange>
  {
    using Type = MappedMemoryRange;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct type.
  template <>
  struct CppType<StructureType, StructureType::eMappedMemoryRange>
  {
    using Type = MappedMemoryRange;
  };

  // wrapper struct for struct VkMemoryAllocateFlagsInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryAllocateFlagsInfo.html
  struct MemoryAllocateFlagsInfo
  {
    using NativeType = VkMemoryAllocateFlagsInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryAllocateFlagsInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      MemoryAllocateFlagsInfo( MemoryAllocateFlags flags_ = {}, uint32_t deviceMask_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , deviceMask{ deviceMask_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the C struct relies on the layout compatibility of the two types.
    MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryAllocateFlagsInfo( *reinterpret_cast<MemoryAllocateFlagsInfo const *>( &rhs ) )
    {
    }

    MemoryAllocateFlagsInfo & operator=( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    MemoryAllocateFlagsInfo & operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryAllocateFlagsInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent per-member setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setFlags( MemoryAllocateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceMask = deviceMask_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type (reference and pointer, const and non-const).
    operator VkMemoryAllocateFlagsInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryAllocateFlagsInfo *>( this );
    }

    operator VkMemoryAllocateFlagsInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryAllocateFlagsInfo *>( this );
    }

    operator VkMemoryAllocateFlagsInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryAllocateFlagsInfo *>( this );
    }

    operator VkMemoryAllocateFlagsInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryAllocateFlagsInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references, for generic tooling.
    std::tuple<StructureType const &, const void * const &, MemoryAllocateFlags const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, deviceMask );
    }
#endif

    // Comparison is member-wise.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryAllocateFlagsInfo const & ) const = default;
#else
    bool operator==( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( deviceMask == rhs.deviceMask );
#  endif
    }

    bool operator!=( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType       sType      = StructureType::eMemoryAllocateFlagsInfo;
    const void *        pNext      = {};
    MemoryAllocateFlags flags      = {};
    uint32_t            deviceMask = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkMemoryAllocateFlagsInfo to its C++ wrapper (C++20 only).
  template <>
  struct CppType<VkMemoryAllocateFlagsInfo>
  {
    using Type = MemoryAllocateFlagsInfo;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct type.
  template <>
  struct CppType<StructureType, StructureType::eMemoryAllocateFlagsInfo>
  {
    using Type = MemoryAllocateFlagsInfo;
  };

  // Alias from the era when this struct was provided by VK_KHR_device_group (before promotion to core).
  using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo;

  // wrapper struct for struct VkMemoryAllocateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryAllocateInfo.html
  struct MemoryAllocateInfo
  {
    using NativeType = VkMemoryAllocateInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryAllocateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      MemoryAllocateInfo( DeviceSize allocationSize_ = {}, uint32_t memoryTypeIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , allocationSize{ allocationSize_ }
      , memoryTypeIndex{ memoryTypeIndex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the C struct relies on the layout compatibility of the two types.
    MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryAllocateInfo( *reinterpret_cast<MemoryAllocateInfo const *>( &rhs ) ) {}

    MemoryAllocateInfo & operator=( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    MemoryAllocateInfo & operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryAllocateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent per-member setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setAllocationSize( DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT
    {
      allocationSize = allocationSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryTypeIndex = memoryTypeIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type (reference and pointer, const and non-const).
    operator VkMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryAllocateInfo *>( this );
    }

    operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryAllocateInfo *>( this );
    }

    operator VkMemoryAllocateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryAllocateInfo *>( this );
    }

    operator VkMemoryAllocateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryAllocateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references, for generic tooling.
    std::tuple<StructureType const &, const void * const &, DeviceSize const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, allocationSize, memoryTypeIndex );
    }
#endif

    // Comparison is member-wise.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryAllocateInfo const & ) const = default;
#else
    bool operator==( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && ( memoryTypeIndex == rhs.memoryTypeIndex );
#  endif
    }

    bool operator!=( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType           = StructureType::eMemoryAllocateInfo;
    const void *  pNext           = {};
    DeviceSize    allocationSize  = {};
    uint32_t      memoryTypeIndex = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkMemoryAllocateInfo to its C++ wrapper (C++20 only).
  template <>
  struct CppType<VkMemoryAllocateInfo>
  {
    using Type = MemoryAllocateInfo;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct type.
  template <>
  struct CppType<StructureType, StructureType::eMemoryAllocateInfo>
  {
    using Type = MemoryAllocateInfo;
  };

  // wrapper struct for struct VkMemoryBarrier, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryBarrier.html
  struct MemoryBarrier
  {
    using NativeType = VkMemoryBarrier;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryBarrier;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryBarrier( AccessFlags srcAccessMask_ = {}, AccessFlags dstAccessMask_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcAccessMask{ srcAccessMask_ }
      , dstAccessMask{ dstAccessMask_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryBarrier( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the C struct relies on the layout compatibility of the two types.
    MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryBarrier( *reinterpret_cast<MemoryBarrier const *>( &rhs ) ) {}

    MemoryBarrier & operator=( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    MemoryBarrier & operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryBarrier const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent per-member setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setSrcAccessMask( AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAccessMask = srcAccessMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setDstAccessMask( AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAccessMask = dstAccessMask_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type (reference and pointer, const and non-const).
    operator VkMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryBarrier *>( this );
    }

    operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryBarrier *>( this );
    }

    operator VkMemoryBarrier const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryBarrier *>( this );
    }

    operator VkMemoryBarrier *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryBarrier *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references, for generic tooling.
    std::tuple<StructureType const &, const void * const &, AccessFlags const &, AccessFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcAccessMask, dstAccessMask );
    }
#endif

    // Comparison is member-wise.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryBarrier const & ) const = default;
#else
    bool operator==( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask );
#  endif
    }

    bool operator!=( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType         = StructureType::eMemoryBarrier;
    const void *  pNext         = {};
    AccessFlags   srcAccessMask = {};
    AccessFlags   dstAccessMask = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkMemoryBarrier to its C++ wrapper (C++20 only).
  template <>
  struct CppType<VkMemoryBarrier>
  {
    using Type = MemoryBarrier;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct type.
  template <>
  struct CppType<StructureType, StructureType::eMemoryBarrier>
  {
    using Type = MemoryBarrier;
  };

  // wrapper struct for struct VkMemoryBarrierAccessFlags3KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryBarrierAccessFlags3KHR.html
  struct MemoryBarrierAccessFlags3KHR
  {
    using NativeType = VkMemoryBarrierAccessFlags3KHR;

    // allowDuplicate == false: this struct is expected at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryBarrierAccessFlags3KHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed via its member initializer and cannot be overridden.
    VULKAN_HPP_CONSTEXPR MemoryBarrierAccessFlags3KHR( AccessFlags3KHR srcAccessMask3_ = {},
                                                       AccessFlags3KHR dstAccessMask3_ = {},
                                                       const void *    pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcAccessMask3{ srcAccessMask3_ }
      , dstAccessMask3{ dstAccessMask3_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryBarrierAccessFlags3KHR( MemoryBarrierAccessFlags3KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on wrapper and native type sharing the same layout.
    MemoryBarrierAccessFlags3KHR( VkMemoryBarrierAccessFlags3KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryBarrierAccessFlags3KHR( *reinterpret_cast<MemoryBarrierAccessFlags3KHR const *>( &rhs ) )
    {
    }

    MemoryBarrierAccessFlags3KHR & operator=( MemoryBarrierAccessFlags3KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-compatible reinterpret_cast).
    MemoryBarrierAccessFlags3KHR & operator=( VkMemoryBarrierAccessFlags3KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryBarrierAccessFlags3KHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters returning *this, so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 MemoryBarrierAccessFlags3KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryBarrierAccessFlags3KHR & setSrcAccessMask3( AccessFlags3KHR srcAccessMask3_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAccessMask3 = srcAccessMask3_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryBarrierAccessFlags3KHR & setDstAccessMask3( AccessFlags3KHR dstAccessMask3_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAccessMask3 = dstAccessMask3_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type, by reference and by pointer.
    operator VkMemoryBarrierAccessFlags3KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryBarrierAccessFlags3KHR *>( this );
    }

    operator VkMemoryBarrierAccessFlags3KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryBarrierAccessFlags3KHR *>( this );
    }

    operator VkMemoryBarrierAccessFlags3KHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryBarrierAccessFlags3KHR *>( this );
    }

    operator VkMemoryBarrierAccessFlags3KHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryBarrierAccessFlags3KHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view of all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, AccessFlags3KHR const &, AccessFlags3KHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcAccessMask3, dstAccessMask3 );
    }
#endif

    // Defaulted three-way comparison when available; otherwise member-wise operator== / operator!=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryBarrierAccessFlags3KHR const & ) const = default;
#else
    bool operator==( MemoryBarrierAccessFlags3KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask3 == rhs.srcAccessMask3 ) && ( dstAccessMask3 == rhs.dstAccessMask3 );
#  endif
    }

    bool operator!=( MemoryBarrierAccessFlags3KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType   sType          = StructureType::eMemoryBarrierAccessFlags3KHR;
    const void *    pNext          = {};
    AccessFlags3KHR srcAccessMask3 = {};
    AccessFlags3KHR dstAccessMask3 = {};
  };

  // Map the native C type VkMemoryBarrierAccessFlags3KHR to its C++ wrapper (C++20 and newer only).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkMemoryBarrierAccessFlags3KHR>
  {
    using Type = MemoryBarrierAccessFlags3KHR;
  };
#endif

  // Map the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eMemoryBarrierAccessFlags3KHR>
  {
    using Type = MemoryBarrierAccessFlags3KHR;
  };

  // wrapper struct for struct VkMemoryDedicatedAllocateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryDedicatedAllocateInfo.html
  struct MemoryDedicatedAllocateInfo
  {
    using NativeType = VkMemoryDedicatedAllocateInfo;

    // allowDuplicate == false: this struct is expected at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryDedicatedAllocateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed via its member initializer and cannot be overridden.
    VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( Image image_ = {}, Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , image{ image_ }
      , buffer{ buffer_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on wrapper and native type sharing the same layout.
    MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryDedicatedAllocateInfo( *reinterpret_cast<MemoryDedicatedAllocateInfo const *>( &rhs ) )
    {
    }

    MemoryDedicatedAllocateInfo & operator=( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-compatible reinterpret_cast).
    MemoryDedicatedAllocateInfo & operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryDedicatedAllocateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters returning *this, so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setImage( Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setBuffer( Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type, by reference and by pointer.
    operator VkMemoryDedicatedAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryDedicatedAllocateInfo *>( this );
    }

    operator VkMemoryDedicatedAllocateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryDedicatedAllocateInfo *>( this );
    }

    operator VkMemoryDedicatedAllocateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryDedicatedAllocateInfo *>( this );
    }

    operator VkMemoryDedicatedAllocateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryDedicatedAllocateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view of all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, Image const &, Buffer const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, image, buffer );
    }
#endif

    // Defaulted three-way comparison when available; otherwise member-wise operator== / operator!=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryDedicatedAllocateInfo const & ) const = default;
#else
    bool operator==( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( buffer == rhs.buffer );
#  endif
    }

    bool operator!=( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType  = StructureType::eMemoryDedicatedAllocateInfo;
    const void *  pNext  = {};
    Image         image  = {};
    Buffer        buffer = {};
  };

  // Map the native C type VkMemoryDedicatedAllocateInfo to its C++ wrapper (C++20 and newer only).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkMemoryDedicatedAllocateInfo>
  {
    using Type = MemoryDedicatedAllocateInfo;
  };
#endif

  // Map the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eMemoryDedicatedAllocateInfo>
  {
    using Type = MemoryDedicatedAllocateInfo;
  };

  // Alias kept for source compatibility with the extension-era KHR name.
  using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo;

  // wrapper struct for struct VkMemoryDedicatedAllocateInfoTensorARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryDedicatedAllocateInfoTensorARM.html
  struct MemoryDedicatedAllocateInfoTensorARM
  {
    using NativeType = VkMemoryDedicatedAllocateInfoTensorARM;

    // allowDuplicate == false: this struct is expected at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryDedicatedAllocateInfoTensorARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed via its member initializer and cannot be overridden.
    VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfoTensorARM( TensorARM tensor_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , tensor{ tensor_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfoTensorARM( MemoryDedicatedAllocateInfoTensorARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on wrapper and native type sharing the same layout.
    MemoryDedicatedAllocateInfoTensorARM( VkMemoryDedicatedAllocateInfoTensorARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryDedicatedAllocateInfoTensorARM( *reinterpret_cast<MemoryDedicatedAllocateInfoTensorARM const *>( &rhs ) )
    {
    }

    MemoryDedicatedAllocateInfoTensorARM & operator=( MemoryDedicatedAllocateInfoTensorARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-compatible reinterpret_cast).
    MemoryDedicatedAllocateInfoTensorARM & operator=( VkMemoryDedicatedAllocateInfoTensorARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryDedicatedAllocateInfoTensorARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters returning *this, so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfoTensorARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfoTensorARM & setTensor( TensorARM tensor_ ) VULKAN_HPP_NOEXCEPT
    {
      tensor = tensor_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type, by reference and by pointer.
    operator VkMemoryDedicatedAllocateInfoTensorARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryDedicatedAllocateInfoTensorARM *>( this );
    }

    operator VkMemoryDedicatedAllocateInfoTensorARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryDedicatedAllocateInfoTensorARM *>( this );
    }

    operator VkMemoryDedicatedAllocateInfoTensorARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryDedicatedAllocateInfoTensorARM *>( this );
    }

    operator VkMemoryDedicatedAllocateInfoTensorARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryDedicatedAllocateInfoTensorARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view of all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, TensorARM const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, tensor );
    }
#endif

    // Defaulted three-way comparison when available; otherwise member-wise operator== / operator!=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryDedicatedAllocateInfoTensorARM const & ) const = default;
#else
    bool operator==( MemoryDedicatedAllocateInfoTensorARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tensor == rhs.tensor );
#  endif
    }

    bool operator!=( MemoryDedicatedAllocateInfoTensorARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType  = StructureType::eMemoryDedicatedAllocateInfoTensorARM;
    const void *  pNext  = {};
    TensorARM     tensor = {};
  };

  // Map the native C type VkMemoryDedicatedAllocateInfoTensorARM to its C++ wrapper (C++20 and newer only).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkMemoryDedicatedAllocateInfoTensorARM>
  {
    using Type = MemoryDedicatedAllocateInfoTensorARM;
  };
#endif

  // Map the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eMemoryDedicatedAllocateInfoTensorARM>
  {
    using Type = MemoryDedicatedAllocateInfoTensorARM;
  };

  // wrapper struct for struct VkMemoryDedicatedRequirements, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryDedicatedRequirements.html
  struct MemoryDedicatedRequirements
  {
    using NativeType = VkMemoryDedicatedRequirements;

    // allowDuplicate == false: this struct is expected at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryDedicatedRequirements;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed via its member initializer and cannot be overridden.
    // Note: pNext is a non-const void * here (output structure filled in by the implementation).
    VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( Bool32 prefersDedicatedAllocation_  = {},
                                                      Bool32 requiresDedicatedAllocation_ = {},
                                                      void * pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , prefersDedicatedAllocation{ prefersDedicatedAllocation_ }
      , requiresDedicatedAllocation{ requiresDedicatedAllocation_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on wrapper and native type sharing the same layout.
    MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryDedicatedRequirements( *reinterpret_cast<MemoryDedicatedRequirements const *>( &rhs ) )
    {
    }

    MemoryDedicatedRequirements & operator=( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-compatible reinterpret_cast).
    // No setters are generated for this struct; it is written by the implementation, not the application.
    MemoryDedicatedRequirements & operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryDedicatedRequirements const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native Vulkan type, by reference and by pointer.
    operator VkMemoryDedicatedRequirements const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryDedicatedRequirements *>( this );
    }

    operator VkMemoryDedicatedRequirements &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryDedicatedRequirements *>( this );
    }

    operator VkMemoryDedicatedRequirements const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryDedicatedRequirements *>( this );
    }

    operator VkMemoryDedicatedRequirements *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryDedicatedRequirements *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view of all members, used for reflection-based comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, prefersDedicatedAllocation, requiresDedicatedAllocation );
    }
#endif

    // Defaulted three-way comparison when available; otherwise member-wise operator== / operator!=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryDedicatedRequirements const & ) const = default;
#else
    bool operator==( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation ) &&
             ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation );
#  endif
    }

    bool operator!=( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                       = StructureType::eMemoryDedicatedRequirements;
    void *        pNext                       = {};
    Bool32        prefersDedicatedAllocation  = {};
    Bool32        requiresDedicatedAllocation = {};
  };

  // Map the native C type VkMemoryDedicatedRequirements to its C++ wrapper (C++20 and newer only).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkMemoryDedicatedRequirements>
  {
    using Type = MemoryDedicatedRequirements;
  };
#endif

  // Map the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eMemoryDedicatedRequirements>
  {
    using Type = MemoryDedicatedRequirements;
  };

  // Alias kept for source compatibility with the extension-era KHR name.
  using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements;

  // wrapper struct for struct VkMemoryFdPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryFdPropertiesKHR.html
  struct MemoryFdPropertiesKHR
  {
    using NativeType = VkMemoryFdPropertiesKHR;

    // allowDuplicate == false: this struct is expected at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryFdPropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed via its member initializer and cannot be overridden.
    // Note: pNext is a non-const void * here (output structure filled in by the implementation).
    VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , memoryTypeBits{ memoryTypeBits_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on wrapper and native type sharing the same layout.
    MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryFdPropertiesKHR( *reinterpret_cast<MemoryFdPropertiesKHR const *>( &rhs ) )
    {
    }

    MemoryFdPropertiesKHR & operator=( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-compatible reinterpret_cast).
    // No setters are generated for this struct; it is written by the implementation, not the application.
    MemoryFdPropertiesKHR & operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryFdPropertiesKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native Vulkan type, by reference and by pointer.
    operator VkMemoryFdPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryFdPropertiesKHR *>( this );
    }

    operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryFdPropertiesKHR *>( this );
    }

    operator VkMemoryFdPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryFdPropertiesKHR *>( this );
    }

    operator VkMemoryFdPropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryFdPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view of all members, used for reflection-based comparison.
    std::tuple<StructureType const &, void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memoryTypeBits );
    }
#endif

    // Defaulted three-way comparison when available; otherwise member-wise operator== / operator!=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryFdPropertiesKHR const & ) const = default;
#else
    bool operator==( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
#  endif
    }

    bool operator!=( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType          = StructureType::eMemoryFdPropertiesKHR;
    void *        pNext          = {};
    uint32_t      memoryTypeBits = {};
  };

  // Map the native C type VkMemoryFdPropertiesKHR to its C++ wrapper (C++20 and newer only).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkMemoryFdPropertiesKHR>
  {
    using Type = MemoryFdPropertiesKHR;
  };
#endif

  // Map the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eMemoryFdPropertiesKHR>
  {
    using Type = MemoryFdPropertiesKHR;
  };

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  // wrapper struct for struct VkMemoryGetAndroidHardwareBufferInfoANDROID, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetAndroidHardwareBufferInfoANDROID.html
  struct MemoryGetAndroidHardwareBufferInfoANDROID
  {
    using NativeType = VkMemoryGetAndroidHardwareBufferInfoANDROID;

    // allowDuplicate == false: this struct is expected at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed via its member initializer and cannot be overridden.
    VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( DeviceMemory memory_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , memory{ memory_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on wrapper and native type sharing the same layout.
    MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryGetAndroidHardwareBufferInfoANDROID( *reinterpret_cast<MemoryGetAndroidHardwareBufferInfoANDROID const *>( &rhs ) )
    {
    }

    MemoryGetAndroidHardwareBufferInfoANDROID & operator=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-compatible reinterpret_cast).
    MemoryGetAndroidHardwareBufferInfoANDROID & operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryGetAndroidHardwareBufferInfoANDROID const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters returning *this, so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID & setMemory( DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type, by reference and by pointer.
    operator VkMemoryGetAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( this );
    }

    operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryGetAndroidHardwareBufferInfoANDROID *>( this );
    }

    operator VkMemoryGetAndroidHardwareBufferInfoANDROID const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( this );
    }

    operator VkMemoryGetAndroidHardwareBufferInfoANDROID *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryGetAndroidHardwareBufferInfoANDROID *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view of all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, DeviceMemory const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memory );
    }
#  endif

    // Defaulted three-way comparison when available; otherwise member-wise operator== / operator!=.
#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryGetAndroidHardwareBufferInfoANDROID const & ) const = default;
#  else
    bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory );
#    endif
    }

    bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    StructureType sType  = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;
    const void *  pNext  = {};
    DeviceMemory  memory = {};
  };

  // Map the native C type VkMemoryGetAndroidHardwareBufferInfoANDROID to its C++ wrapper (C++20 and newer only).
#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkMemoryGetAndroidHardwareBufferInfoANDROID>
  {
    using Type = MemoryGetAndroidHardwareBufferInfoANDROID;
  };
#  endif

  // Map the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID>
  {
    using Type = MemoryGetAndroidHardwareBufferInfoANDROID;
  };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/

  // wrapper struct for struct VkMemoryGetFdInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetFdInfoKHR.html
  struct MemoryGetFdInfoKHR
  {
    using NativeType = VkMemoryGetFdInfoKHR;

    // allowDuplicate == false: this struct is expected at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryGetFdInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed via its member initializer and cannot be overridden.
    // handleType defaults to the first enum value (eOpaqueFd).
    VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( DeviceMemory                     memory_     = {},
                                             ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
                                             const void *                     pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , memory{ memory_ }
      , handleType{ handleType_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on wrapper and native type sharing the same layout.
    MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryGetFdInfoKHR( *reinterpret_cast<MemoryGetFdInfoKHR const *>( &rhs ) ) {}

    MemoryGetFdInfoKHR & operator=( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-compatible reinterpret_cast).
    MemoryGetFdInfoKHR & operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryGetFdInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters returning *this, so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setMemory( DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type, by reference and by pointer.
    operator VkMemoryGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryGetFdInfoKHR *>( this );
    }

    operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryGetFdInfoKHR *>( this );
    }

    operator VkMemoryGetFdInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryGetFdInfoKHR *>( this );
    }

    operator VkMemoryGetFdInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryGetFdInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view of all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, DeviceMemory const &, ExternalMemoryHandleTypeFlagBits const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memory, handleType );
    }
#endif

    // Defaulted three-way comparison when available; otherwise member-wise operator== / operator!=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryGetFdInfoKHR const & ) const = default;
#else
    bool operator==( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType );
#  endif
    }

    bool operator!=( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                    sType      = StructureType::eMemoryGetFdInfoKHR;
    const void *                     pNext      = {};
    DeviceMemory                     memory     = {};
    ExternalMemoryHandleTypeFlagBits handleType = ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
  };

  // Map the native C type VkMemoryGetFdInfoKHR to its C++ wrapper (C++20 and newer only).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkMemoryGetFdInfoKHR>
  {
    using Type = MemoryGetFdInfoKHR;
  };
#endif

  // Map the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eMemoryGetFdInfoKHR>
  {
    using Type = MemoryGetFdInfoKHR;
  };

#if defined( VK_USE_PLATFORM_METAL_EXT )
  // wrapper struct for struct VkMemoryGetMetalHandleInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetMetalHandleInfoEXT.html
  struct MemoryGetMetalHandleInfoEXT
  {
    using NativeType = VkMemoryGetMetalHandleInfoEXT;

    // allowDuplicate == false: this struct is expected at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryGetMetalHandleInfoEXT;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed via its member initializer and cannot be overridden.
    // handleType defaults to the first enum value (eOpaqueFd); callers typically set a Metal-specific handle type.
    VULKAN_HPP_CONSTEXPR MemoryGetMetalHandleInfoEXT( DeviceMemory                     memory_     = {},
                                                      ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
                                                      const void *                     pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , memory{ memory_ }
      , handleType{ handleType_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryGetMetalHandleInfoEXT( MemoryGetMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on wrapper and native type sharing the same layout.
    MemoryGetMetalHandleInfoEXT( VkMemoryGetMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryGetMetalHandleInfoEXT( *reinterpret_cast<MemoryGetMetalHandleInfoEXT const *>( &rhs ) )
    {
    }

    MemoryGetMetalHandleInfoEXT & operator=( MemoryGetMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-compatible reinterpret_cast).
    MemoryGetMetalHandleInfoEXT & operator=( VkMemoryGetMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryGetMetalHandleInfoEXT const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters returning *this, so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 MemoryGetMetalHandleInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryGetMetalHandleInfoEXT & setMemory( DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryGetMetalHandleInfoEXT & setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type, by reference and by pointer.
    operator VkMemoryGetMetalHandleInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryGetMetalHandleInfoEXT *>( this );
    }

    operator VkMemoryGetMetalHandleInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryGetMetalHandleInfoEXT *>( this );
    }

    operator VkMemoryGetMetalHandleInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryGetMetalHandleInfoEXT *>( this );
    }

    operator VkMemoryGetMetalHandleInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryGetMetalHandleInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view of all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, DeviceMemory const &, ExternalMemoryHandleTypeFlagBits const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memory, handleType );
    }
#  endif

    // Defaulted three-way comparison when available; otherwise member-wise operator== / operator!=.
#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryGetMetalHandleInfoEXT const & ) const = default;
#  else
    bool operator==( MemoryGetMetalHandleInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType );
#    endif
    }

    bool operator!=( MemoryGetMetalHandleInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    StructureType                    sType      = StructureType::eMemoryGetMetalHandleInfoEXT;
    const void *                     pNext      = {};
    DeviceMemory                     memory     = {};
    ExternalMemoryHandleTypeFlagBits handleType = ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
  };

  // Map the native C type VkMemoryGetMetalHandleInfoEXT to its C++ wrapper (C++20 and newer only).
#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkMemoryGetMetalHandleInfoEXT>
  {
    using Type = MemoryGetMetalHandleInfoEXT;
  };
#  endif

  // Map the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eMemoryGetMetalHandleInfoEXT>
  {
    using Type = MemoryGetMetalHandleInfoEXT;
  };
#endif /*VK_USE_PLATFORM_METAL_EXT*/

#if defined( VK_USE_PLATFORM_OHOS )
  // wrapper struct for struct VkMemoryGetNativeBufferInfoOHOS, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetNativeBufferInfoOHOS.html
  struct MemoryGetNativeBufferInfoOHOS
  {
    using NativeType = VkMemoryGetNativeBufferInfoOHOS;

    // Compile-time identification of this struct for pNext-chain utilities.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryGetNativeBufferInfoOHOS;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is fixed by its default member initializer and is never set explicitly.
    VULKAN_HPP_CONSTEXPR MemoryGetNativeBufferInfoOHOS( DeviceMemory memory_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , memory{ memory_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryGetNativeBufferInfoOHOS( MemoryGetNativeBufferInfoOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on this wrapper mirroring the C struct's layout.
    MemoryGetNativeBufferInfoOHOS( VkMemoryGetNativeBufferInfoOHOS const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryGetNativeBufferInfoOHOS( *reinterpret_cast<MemoryGetNativeBufferInfoOHOS const *>( &rhs ) )
    {
    }

    MemoryGetNativeBufferInfoOHOS & operator=( MemoryGetNativeBufferInfoOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct via the layout-compatible cast.
    MemoryGetNativeBufferInfoOHOS & operator=( VkMemoryGetNativeBufferInfoOHOS const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryGetNativeBufferInfoOHOS const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chaining.
    VULKAN_HPP_CONSTEXPR_14 MemoryGetNativeBufferInfoOHOS & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryGetNativeBufferInfoOHOS & setMemory( DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the C struct; valid only because the wrapper is layout-compatible.
    operator VkMemoryGetNativeBufferInfoOHOS const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryGetNativeBufferInfoOHOS *>( this );
    }

    operator VkMemoryGetNativeBufferInfoOHOS &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryGetNativeBufferInfoOHOS *>( this );
    }

    operator VkMemoryGetNativeBufferInfoOHOS const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryGetNativeBufferInfoOHOS *>( this );
    }

    operator VkMemoryGetNativeBufferInfoOHOS *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryGetNativeBufferInfoOHOS *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references, for generic comparison/hash helpers.
    std::tuple<StructureType const &, const void * const &, DeviceMemory const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memory );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryGetNativeBufferInfoOHOS const & ) const = default;
#  else
    // Member-wise equality when a defaulted <=> is unavailable.
    bool operator==( MemoryGetNativeBufferInfoOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory );
#    endif
    }

    bool operator!=( MemoryGetNativeBufferInfoOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Member order must match VkMemoryGetNativeBufferInfoOHOS exactly; there is no setter for sType.
    StructureType sType  = StructureType::eMemoryGetNativeBufferInfoOHOS;
    const void *  pNext  = {};
    DeviceMemory  memory = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C struct to its C++ wrapper type, for use in generic code.
  template <>
  struct CppType<VkMemoryGetNativeBufferInfoOHOS>
  {
    using Type = MemoryGetNativeBufferInfoOHOS;
  };
#  endif

  // Trait mapping the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eMemoryGetNativeBufferInfoOHOS>
  {
    using Type = MemoryGetNativeBufferInfoOHOS;
  };
#endif /*VK_USE_PLATFORM_OHOS*/

  // wrapper struct for struct VkMemoryGetRemoteAddressInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetRemoteAddressInfoNV.html
  struct MemoryGetRemoteAddressInfoNV
  {
    using NativeType = VkMemoryGetRemoteAddressInfoNV;

    // Compile-time identification of this struct for pNext-chain utilities.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryGetRemoteAddressInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is fixed by its default member initializer and is never set explicitly.
    VULKAN_HPP_CONSTEXPR MemoryGetRemoteAddressInfoNV( DeviceMemory                     memory_     = {},
                                                       ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
                                                       const void *                     pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , memory{ memory_ }
      , handleType{ handleType_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryGetRemoteAddressInfoNV( MemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on this wrapper mirroring the C struct's layout.
    MemoryGetRemoteAddressInfoNV( VkMemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryGetRemoteAddressInfoNV( *reinterpret_cast<MemoryGetRemoteAddressInfoNV const *>( &rhs ) )
    {
    }

    MemoryGetRemoteAddressInfoNV & operator=( MemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct via the layout-compatible cast.
    MemoryGetRemoteAddressInfoNV & operator=( VkMemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryGetRemoteAddressInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chaining.
    VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setMemory( DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the C struct; valid only because the wrapper is layout-compatible.
    operator VkMemoryGetRemoteAddressInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( this );
    }

    operator VkMemoryGetRemoteAddressInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryGetRemoteAddressInfoNV *>( this );
    }

    operator VkMemoryGetRemoteAddressInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( this );
    }

    operator VkMemoryGetRemoteAddressInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryGetRemoteAddressInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references, for generic comparison/hash helpers.
    std::tuple<StructureType const &, const void * const &, DeviceMemory const &, ExternalMemoryHandleTypeFlagBits const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memory, handleType );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryGetRemoteAddressInfoNV const & ) const = default;
#else
    // Member-wise equality when a defaulted <=> is unavailable.
    bool operator==( MemoryGetRemoteAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType );
#  endif
    }

    bool operator!=( MemoryGetRemoteAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order must match VkMemoryGetRemoteAddressInfoNV exactly; there is no setter for sType.
    StructureType                    sType      = StructureType::eMemoryGetRemoteAddressInfoNV;
    const void *                     pNext      = {};
    DeviceMemory                     memory     = {};
    ExternalMemoryHandleTypeFlagBits handleType = ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C struct to its C++ wrapper type, for use in generic code.
  template <>
  struct CppType<VkMemoryGetRemoteAddressInfoNV>
  {
    using Type = MemoryGetRemoteAddressInfoNV;
  };
#endif

  // Trait mapping the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eMemoryGetRemoteAddressInfoNV>
  {
    using Type = MemoryGetRemoteAddressInfoNV;
  };

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  // wrapper struct for struct VkMemoryGetWin32HandleInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetWin32HandleInfoKHR.html
  struct MemoryGetWin32HandleInfoKHR
  {
    using NativeType = VkMemoryGetWin32HandleInfoKHR;

    // Compile-time identification of this struct for pNext-chain utilities.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryGetWin32HandleInfoKHR;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is fixed by its default member initializer and is never set explicitly.
    // NOTE: the handleType_ default (eOpaqueFd) is the enum's first value, generated mechanically.
    VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( DeviceMemory                     memory_     = {},
                                                      ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
                                                      const void *                     pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , memory{ memory_ }
      , handleType{ handleType_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on this wrapper mirroring the C struct's layout.
    MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryGetWin32HandleInfoKHR( *reinterpret_cast<MemoryGetWin32HandleInfoKHR const *>( &rhs ) )
    {
    }

    MemoryGetWin32HandleInfoKHR & operator=( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct via the layout-compatible cast.
    MemoryGetWin32HandleInfoKHR & operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryGetWin32HandleInfoKHR const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chaining.
    VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setMemory( DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the C struct; valid only because the wrapper is layout-compatible.
    operator VkMemoryGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( this );
    }

    operator VkMemoryGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryGetWin32HandleInfoKHR *>( this );
    }

    operator VkMemoryGetWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( this );
    }

    operator VkMemoryGetWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryGetWin32HandleInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references, for generic comparison/hash helpers.
    std::tuple<StructureType const &, const void * const &, DeviceMemory const &, ExternalMemoryHandleTypeFlagBits const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memory, handleType );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryGetWin32HandleInfoKHR const & ) const = default;
#  else
    // Member-wise equality when a defaulted <=> is unavailable.
    bool operator==( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType );
#    endif
    }

    bool operator!=( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Member order must match VkMemoryGetWin32HandleInfoKHR exactly; there is no setter for sType.
    StructureType                    sType      = StructureType::eMemoryGetWin32HandleInfoKHR;
    const void *                     pNext      = {};
    DeviceMemory                     memory     = {};
    ExternalMemoryHandleTypeFlagBits handleType = ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C struct to its C++ wrapper type, for use in generic code.
  template <>
  struct CppType<VkMemoryGetWin32HandleInfoKHR>
  {
    using Type = MemoryGetWin32HandleInfoKHR;
  };
#  endif

  // Trait mapping the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eMemoryGetWin32HandleInfoKHR>
  {
    using Type = MemoryGetWin32HandleInfoKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

#if defined( VK_USE_PLATFORM_FUCHSIA )
  // wrapper struct for struct VkMemoryGetZirconHandleInfoFUCHSIA, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetZirconHandleInfoFUCHSIA.html
  struct MemoryGetZirconHandleInfoFUCHSIA
  {
    using NativeType = VkMemoryGetZirconHandleInfoFUCHSIA;

    // Compile-time identification of this struct for pNext-chain utilities.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryGetZirconHandleInfoFUCHSIA;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is fixed by its default member initializer and is never set explicitly.
    VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA( DeviceMemory                     memory_     = {},
                                                           ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
                                                           const void *                     pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , memory{ memory_ }
      , handleType{ handleType_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on this wrapper mirroring the C struct's layout.
    MemoryGetZirconHandleInfoFUCHSIA( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryGetZirconHandleInfoFUCHSIA( *reinterpret_cast<MemoryGetZirconHandleInfoFUCHSIA const *>( &rhs ) )
    {
    }

    MemoryGetZirconHandleInfoFUCHSIA & operator=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct via the layout-compatible cast.
    MemoryGetZirconHandleInfoFUCHSIA & operator=( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryGetZirconHandleInfoFUCHSIA const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chaining.
    VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setMemory( DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the C struct; valid only because the wrapper is layout-compatible.
    operator VkMemoryGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( this );
    }

    operator VkMemoryGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryGetZirconHandleInfoFUCHSIA *>( this );
    }

    operator VkMemoryGetZirconHandleInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( this );
    }

    operator VkMemoryGetZirconHandleInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryGetZirconHandleInfoFUCHSIA *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references, for generic comparison/hash helpers.
    std::tuple<StructureType const &, const void * const &, DeviceMemory const &, ExternalMemoryHandleTypeFlagBits const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memory, handleType );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryGetZirconHandleInfoFUCHSIA const & ) const = default;
#  else
    // Member-wise equality when a defaulted <=> is unavailable.
    bool operator==( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType );
#    endif
    }

    bool operator!=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Member order must match VkMemoryGetZirconHandleInfoFUCHSIA exactly; there is no setter for sType.
    StructureType                    sType      = StructureType::eMemoryGetZirconHandleInfoFUCHSIA;
    const void *                     pNext      = {};
    DeviceMemory                     memory     = {};
    ExternalMemoryHandleTypeFlagBits handleType = ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C struct to its C++ wrapper type, for use in generic code.
  template <>
  struct CppType<VkMemoryGetZirconHandleInfoFUCHSIA>
  {
    using Type = MemoryGetZirconHandleInfoFUCHSIA;
  };
#  endif

  // Trait mapping the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eMemoryGetZirconHandleInfoFUCHSIA>
  {
    using Type = MemoryGetZirconHandleInfoFUCHSIA;
  };
#endif /*VK_USE_PLATFORM_FUCHSIA*/

  // wrapper struct for struct VkMemoryHeap, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryHeap.html
  struct MemoryHeap
  {
    using NativeType = VkMemoryHeap;

    // Output-only struct: no sType/pNext, no setters — it is filled in by the implementation.
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryHeap( DeviceSize size_ = {}, MemoryHeapFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
      : size{ size_ }
      , flags{ flags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryHeap( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on this wrapper mirroring the C struct's layout.
    MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryHeap( *reinterpret_cast<MemoryHeap const *>( &rhs ) ) {}

    MemoryHeap & operator=( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct via the layout-compatible cast.
    MemoryHeap & operator=( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryHeap const *>( &rhs );
      return *this;
    }

    // Zero-cost conversions to the C struct; valid only because the wrapper is layout-compatible.
    operator VkMemoryHeap const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryHeap *>( this );
    }

    operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryHeap *>( this );
    }

    operator VkMemoryHeap const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryHeap *>( this );
    }

    operator VkMemoryHeap *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryHeap *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references, for generic comparison/hash helpers.
    std::tuple<DeviceSize const &, MemoryHeapFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( size, flags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryHeap const & ) const = default;
#else
    // Member-wise equality when a defaulted <=> is unavailable.
    bool operator==( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( size == rhs.size ) && ( flags == rhs.flags );
#  endif
    }

    bool operator!=( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order must match VkMemoryHeap exactly.
    DeviceSize      size  = {};
    MemoryHeapFlags flags = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C struct to its C++ wrapper type, for use in generic code.
  // (No StructureType specialization: MemoryHeap has no sType member.)
  template <>
  struct CppType<VkMemoryHeap>
  {
    using Type = MemoryHeap;
  };
#endif

  // wrapper struct for struct VkMemoryHostPointerPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryHostPointerPropertiesEXT.html
  struct MemoryHostPointerPropertiesEXT
  {
    using NativeType = VkMemoryHostPointerPropertiesEXT;

    // Compile-time identification of this struct for pNext-chain utilities.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryHostPointerPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Output struct (note the non-const pNext and absence of setters); filled in by the implementation.
    VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , memoryTypeBits{ memoryTypeBits_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on this wrapper mirroring the C struct's layout.
    MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryHostPointerPropertiesEXT( *reinterpret_cast<MemoryHostPointerPropertiesEXT const *>( &rhs ) )
    {
    }

    MemoryHostPointerPropertiesEXT & operator=( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct via the layout-compatible cast.
    MemoryHostPointerPropertiesEXT & operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryHostPointerPropertiesEXT const *>( &rhs );
      return *this;
    }

    // Zero-cost conversions to the C struct; valid only because the wrapper is layout-compatible.
    operator VkMemoryHostPointerPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryHostPointerPropertiesEXT *>( this );
    }

    operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( this );
    }

    operator VkMemoryHostPointerPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryHostPointerPropertiesEXT *>( this );
    }

    operator VkMemoryHostPointerPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references, for generic comparison/hash helpers.
    std::tuple<StructureType const &, void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memoryTypeBits );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryHostPointerPropertiesEXT const & ) const = default;
#else
    // Member-wise equality when a defaulted <=> is unavailable.
    bool operator==( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
#  endif
    }

    bool operator!=( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order must match VkMemoryHostPointerPropertiesEXT exactly; there is no setter for sType.
    StructureType sType          = StructureType::eMemoryHostPointerPropertiesEXT;
    void *        pNext          = {};
    uint32_t      memoryTypeBits = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C struct to its C++ wrapper type, for use in generic code.
  template <>
  struct CppType<VkMemoryHostPointerPropertiesEXT>
  {
    using Type = MemoryHostPointerPropertiesEXT;
  };
#endif

  // Trait mapping the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eMemoryHostPointerPropertiesEXT>
  {
    using Type = MemoryHostPointerPropertiesEXT;
  };

  // wrapper struct for struct VkMemoryMapInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryMapInfo.html
  struct MemoryMapInfo
  {
    using NativeType = VkMemoryMapInfo;

    // Compile-time identification of this struct for pNext-chain utilities.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryMapInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is fixed by its default member initializer and is never set explicitly.
    VULKAN_HPP_CONSTEXPR MemoryMapInfo(
      MemoryMapFlags flags_ = {}, DeviceMemory memory_ = {}, DeviceSize offset_ = {}, DeviceSize size_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , memory{ memory_ }
      , offset{ offset_ }
      , size{ size_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryMapInfo( MemoryMapInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on this wrapper mirroring the C struct's layout.
    MemoryMapInfo( VkMemoryMapInfo const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryMapInfo( *reinterpret_cast<MemoryMapInfo const *>( &rhs ) ) {}

    MemoryMapInfo & operator=( MemoryMapInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct via the layout-compatible cast.
    MemoryMapInfo & operator=( VkMemoryMapInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryMapInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chaining.
    VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setFlags( MemoryMapFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setMemory( DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setOffset( DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setSize( DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the C struct; valid only because the wrapper is layout-compatible.
    operator VkMemoryMapInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryMapInfo *>( this );
    }

    operator VkMemoryMapInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryMapInfo *>( this );
    }

    operator VkMemoryMapInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryMapInfo *>( this );
    }

    operator VkMemoryMapInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryMapInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references, for generic comparison/hash helpers.
    std::tuple<StructureType const &, const void * const &, MemoryMapFlags const &, DeviceMemory const &, DeviceSize const &, DeviceSize const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, memory, offset, size );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryMapInfo const & ) const = default;
#else
    // Member-wise equality when a defaulted <=> is unavailable.
    bool operator==( MemoryMapInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( memory == rhs.memory ) && ( offset == rhs.offset ) &&
             ( size == rhs.size );
#  endif
    }

    bool operator!=( MemoryMapInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order must match VkMemoryMapInfo exactly; there is no setter for sType.
    StructureType  sType  = StructureType::eMemoryMapInfo;
    const void *   pNext  = {};
    MemoryMapFlags flags  = {};
    DeviceMemory   memory = {};
    DeviceSize     offset = {};
    DeviceSize     size   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C struct to its C++ wrapper type, for use in generic code.
  template <>
  struct CppType<VkMemoryMapInfo>
  {
    using Type = MemoryMapInfo;
  };
#endif

  // Trait mapping the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eMemoryMapInfo>
  {
    using Type = MemoryMapInfo;
  };

  // Backward-compatible alias for the name used before the struct was promoted from the KHR extension.
  using MemoryMapInfoKHR = MemoryMapInfo;

  // wrapper struct for struct VkMemoryMapPlacedInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryMapPlacedInfoEXT.html
  struct MemoryMapPlacedInfoEXT
  {
    using NativeType = VkMemoryMapPlacedInfoEXT;

    // Compile-time identification of this struct for pNext-chain utilities.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryMapPlacedInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is fixed by its default member initializer and is never set explicitly.
    VULKAN_HPP_CONSTEXPR MemoryMapPlacedInfoEXT( void * pPlacedAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pPlacedAddress{ pPlacedAddress_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryMapPlacedInfoEXT( MemoryMapPlacedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on this wrapper mirroring the C struct's layout.
    MemoryMapPlacedInfoEXT( VkMemoryMapPlacedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryMapPlacedInfoEXT( *reinterpret_cast<MemoryMapPlacedInfoEXT const *>( &rhs ) )
    {
    }

    MemoryMapPlacedInfoEXT & operator=( MemoryMapPlacedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct via the layout-compatible cast.
    MemoryMapPlacedInfoEXT & operator=( VkMemoryMapPlacedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryMapPlacedInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chaining.
    VULKAN_HPP_CONSTEXPR_14 MemoryMapPlacedInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryMapPlacedInfoEXT & setPPlacedAddress( void * pPlacedAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      pPlacedAddress = pPlacedAddress_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the C struct; valid only because the wrapper is layout-compatible.
    operator VkMemoryMapPlacedInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryMapPlacedInfoEXT *>( this );
    }

    operator VkMemoryMapPlacedInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryMapPlacedInfoEXT *>( this );
    }

    operator VkMemoryMapPlacedInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryMapPlacedInfoEXT *>( this );
    }

    operator VkMemoryMapPlacedInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryMapPlacedInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references, for generic comparison/hash helpers.
    std::tuple<StructureType const &, const void * const &, void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pPlacedAddress );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryMapPlacedInfoEXT const & ) const = default;
#else
    // Member-wise equality when a defaulted <=> is unavailable.
    bool operator==( MemoryMapPlacedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pPlacedAddress == rhs.pPlacedAddress );
#  endif
    }

    bool operator!=( MemoryMapPlacedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order must match VkMemoryMapPlacedInfoEXT exactly; there is no setter for sType.
    StructureType sType          = StructureType::eMemoryMapPlacedInfoEXT;
    const void *  pNext          = {};
    void *        pPlacedAddress = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C struct to its C++ wrapper type, for use in generic code.
  template <>
  struct CppType<VkMemoryMapPlacedInfoEXT>
  {
    using Type = MemoryMapPlacedInfoEXT;
  };
#endif

  // Trait mapping the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eMemoryMapPlacedInfoEXT>
  {
    using Type = MemoryMapPlacedInfoEXT;
  };

#if defined( VK_USE_PLATFORM_METAL_EXT )
  // wrapper struct for struct VkMemoryMetalHandlePropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryMetalHandlePropertiesEXT.html
  struct MemoryMetalHandlePropertiesEXT
  {
    // the layout-compatible C API struct this type wraps
    using NativeType = VkMemoryMetalHandlePropertiesEXT;

    // NOTE(review): per vulkan.hpp convention, allowDuplicate presumably controls whether this sType may appear more than once in a StructureChain
    static const bool                                  allowDuplicate = false;
    // the sType value identifying this structure to the Vulkan API
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryMetalHandlePropertiesEXT;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; all members are value-initialized by default
    VULKAN_HPP_CONSTEXPR MemoryMetalHandlePropertiesEXT( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , memoryTypeBits{ memoryTypeBits_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryMetalHandlePropertiesEXT( MemoryMetalHandlePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct; the wrapper is layout-compatible, so reinterpreting the reference is sufficient
    MemoryMetalHandlePropertiesEXT( VkMemoryMetalHandlePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryMetalHandlePropertiesEXT( *reinterpret_cast<MemoryMetalHandlePropertiesEXT const *>( &rhs ) )
    {
    }

    MemoryMetalHandlePropertiesEXT & operator=( MemoryMetalHandlePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct (relies on the same layout compatibility as the converting constructor)
    MemoryMetalHandlePropertiesEXT & operator=( VkMemoryMetalHandlePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryMetalHandlePropertiesEXT const *>( &rhs );
      return *this;
    }

    // implicit conversions to the C struct, by reference and by pointer
    operator VkMemoryMetalHandlePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryMetalHandlePropertiesEXT *>( this );
    }

    operator VkMemoryMetalHandlePropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryMetalHandlePropertiesEXT *>( this );
    }

    operator VkMemoryMetalHandlePropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryMetalHandlePropertiesEXT *>( this );
    }

    operator VkMemoryMetalHandlePropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryMetalHandlePropertiesEXT *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // returns references to all members as a tuple; used by the reflection-based operator== below
    std::tuple<StructureType const &, void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memoryTypeBits );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // member-wise comparison: defaulted three-way comparison when available, explicit == / != otherwise
    auto operator<=>( MemoryMetalHandlePropertiesEXT const & ) const = default;
#  else
    bool operator==( MemoryMetalHandlePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
#    endif
    }

    bool operator!=( MemoryMetalHandlePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // member order must match VkMemoryMetalHandlePropertiesEXT exactly; the reinterpret_cast conversions above rely on it
    StructureType sType          = StructureType::eMemoryMetalHandlePropertiesEXT;
    void *        pNext          = {};
    uint32_t      memoryTypeBits = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type to its C++ wrapper type (specialization only provided for C++20 and newer)
  template <>
  struct CppType<VkMemoryMetalHandlePropertiesEXT>
  {
    using Type = MemoryMetalHandlePropertiesEXT;
  };
#  endif

  // maps StructureType::eMemoryMetalHandlePropertiesEXT to the wrapper type
  template <>
  struct CppType<StructureType, StructureType::eMemoryMetalHandlePropertiesEXT>
  {
    using Type = MemoryMetalHandlePropertiesEXT;
  };
#endif /*VK_USE_PLATFORM_METAL_EXT*/

  // wrapper struct for struct VkMemoryOpaqueCaptureAddressAllocateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryOpaqueCaptureAddressAllocateInfo.html
  struct MemoryOpaqueCaptureAddressAllocateInfo
  {
    // the layout-compatible C API struct this type wraps
    using NativeType = VkMemoryOpaqueCaptureAddressAllocateInfo;

    // NOTE(review): per vulkan.hpp convention, allowDuplicate presumably controls whether this sType may appear more than once in a StructureChain
    static const bool                                  allowDuplicate = false;
    // the sType value identifying this structure to the Vulkan API
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; all members are value-initialized by default, pNext_ comes last so it can be omitted
    VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( uint64_t opaqueCaptureAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , opaqueCaptureAddress{ opaqueCaptureAddress_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct; the wrapper is layout-compatible, so reinterpreting the reference is sufficient
    MemoryOpaqueCaptureAddressAllocateInfo( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryOpaqueCaptureAddressAllocateInfo( *reinterpret_cast<MemoryOpaqueCaptureAddressAllocateInfo const *>( &rhs ) )
    {
    }

    MemoryOpaqueCaptureAddressAllocateInfo & operator=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct (relies on the same layout compatibility as the converting constructor)
    MemoryOpaqueCaptureAddressAllocateInfo & operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryOpaqueCaptureAddressAllocateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      opaqueCaptureAddress = opaqueCaptureAddress_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the C struct, by reference and by pointer
    operator VkMemoryOpaqueCaptureAddressAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryOpaqueCaptureAddressAllocateInfo *>( this );
    }

    operator VkMemoryOpaqueCaptureAddressAllocateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryOpaqueCaptureAddressAllocateInfo *>( this );
    }

    operator VkMemoryOpaqueCaptureAddressAllocateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryOpaqueCaptureAddressAllocateInfo *>( this );
    }

    operator VkMemoryOpaqueCaptureAddressAllocateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryOpaqueCaptureAddressAllocateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // returns references to all members as a tuple; used by the reflection-based operator== below
    std::tuple<StructureType const &, const void * const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, opaqueCaptureAddress );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // member-wise comparison: defaulted three-way comparison when available, explicit == / != otherwise
    auto operator<=>( MemoryOpaqueCaptureAddressAllocateInfo const & ) const = default;
#else
    bool operator==( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
#  endif
    }

    bool operator!=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order must match VkMemoryOpaqueCaptureAddressAllocateInfo exactly; the reinterpret_cast conversions above rely on it
    StructureType sType                = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;
    const void *  pNext                = {};
    uint64_t      opaqueCaptureAddress = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type to its C++ wrapper type (specialization only provided for C++20 and newer)
  template <>
  struct CppType<VkMemoryOpaqueCaptureAddressAllocateInfo>
  {
    using Type = MemoryOpaqueCaptureAddressAllocateInfo;
  };
#endif

  // maps StructureType::eMemoryOpaqueCaptureAddressAllocateInfo to the wrapper type
  template <>
  struct CppType<StructureType, StructureType::eMemoryOpaqueCaptureAddressAllocateInfo>
  {
    using Type = MemoryOpaqueCaptureAddressAllocateInfo;
  };

  // NOTE: KHR-suffixed alias kept for backward source compatibility
  using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo;

  // wrapper struct for struct VkMemoryPriorityAllocateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryPriorityAllocateInfoEXT.html
  struct MemoryPriorityAllocateInfoEXT
  {
    // the layout-compatible C API struct this type wraps
    using NativeType = VkMemoryPriorityAllocateInfoEXT;

    // NOTE(review): per vulkan.hpp convention, allowDuplicate presumably controls whether this sType may appear more than once in a StructureChain
    static const bool                                  allowDuplicate = false;
    // the sType value identifying this structure to the Vulkan API
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryPriorityAllocateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; all members are value-initialized by default, pNext_ comes last so it can be omitted
    VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( float priority_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , priority{ priority_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct; the wrapper is layout-compatible, so reinterpreting the reference is sufficient
    MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryPriorityAllocateInfoEXT( *reinterpret_cast<MemoryPriorityAllocateInfoEXT const *>( &rhs ) )
    {
    }

    MemoryPriorityAllocateInfoEXT & operator=( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct (relies on the same layout compatibility as the converting constructor)
    MemoryPriorityAllocateInfoEXT & operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryPriorityAllocateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT & setPriority( float priority_ ) VULKAN_HPP_NOEXCEPT
    {
      priority = priority_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the C struct, by reference and by pointer
    operator VkMemoryPriorityAllocateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryPriorityAllocateInfoEXT *>( this );
    }

    operator VkMemoryPriorityAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryPriorityAllocateInfoEXT *>( this );
    }

    operator VkMemoryPriorityAllocateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryPriorityAllocateInfoEXT *>( this );
    }

    operator VkMemoryPriorityAllocateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryPriorityAllocateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // returns references to all members as a tuple; used by the reflection-based operator== below
    std::tuple<StructureType const &, const void * const &, float const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, priority );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // member-wise comparison: defaulted three-way comparison when available, explicit == / != otherwise
    // (note: priority is a float, so comparison is exact bitwise-value equality semantics of ==)
    auto operator<=>( MemoryPriorityAllocateInfoEXT const & ) const = default;
#else
    bool operator==( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priority == rhs.priority );
#  endif
    }

    bool operator!=( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order must match VkMemoryPriorityAllocateInfoEXT exactly; the reinterpret_cast conversions above rely on it
    StructureType sType    = StructureType::eMemoryPriorityAllocateInfoEXT;
    const void *  pNext    = {};
    float         priority = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type to its C++ wrapper type (specialization only provided for C++20 and newer)
  template <>
  struct CppType<VkMemoryPriorityAllocateInfoEXT>
  {
    using Type = MemoryPriorityAllocateInfoEXT;
  };
#endif

  // maps StructureType::eMemoryPriorityAllocateInfoEXT to the wrapper type
  template <>
  struct CppType<StructureType, StructureType::eMemoryPriorityAllocateInfoEXT>
  {
    using Type = MemoryPriorityAllocateInfoEXT;
  };

  // wrapper struct for struct VkMemoryRequirements, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryRequirements.html
  struct MemoryRequirements
  {
    // the layout-compatible C API struct this type wraps; note this struct has no sType/pNext
    using NativeType = VkMemoryRequirements;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; all members are value-initialized by default
    VULKAN_HPP_CONSTEXPR MemoryRequirements( DeviceSize size_ = {}, DeviceSize alignment_ = {}, uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
      : size{ size_ }
      , alignment{ alignment_ }
      , memoryTypeBits{ memoryTypeBits_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryRequirements( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct; the wrapper is layout-compatible, so reinterpreting the reference is sufficient
    MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryRequirements( *reinterpret_cast<MemoryRequirements const *>( &rhs ) ) {}

    MemoryRequirements & operator=( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct (relies on the same layout compatibility as the converting constructor)
    MemoryRequirements & operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryRequirements const *>( &rhs );
      return *this;
    }

    // implicit conversions to the C struct, by reference and by pointer
    operator VkMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryRequirements *>( this );
    }

    operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryRequirements *>( this );
    }

    operator VkMemoryRequirements const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryRequirements *>( this );
    }

    operator VkMemoryRequirements *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryRequirements *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // returns references to all members as a tuple; used by the reflection-based operator== below
    std::tuple<DeviceSize const &, DeviceSize const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( size, alignment, memoryTypeBits );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // member-wise comparison: defaulted three-way comparison when available, explicit == / != otherwise
    auto operator<=>( MemoryRequirements const & ) const = default;
#else
    bool operator==( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( size == rhs.size ) && ( alignment == rhs.alignment ) && ( memoryTypeBits == rhs.memoryTypeBits );
#  endif
    }

    bool operator!=( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order must match VkMemoryRequirements exactly; the reinterpret_cast conversions above rely on it
    DeviceSize size           = {};
    DeviceSize alignment      = {};
    uint32_t   memoryTypeBits = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type to its C++ wrapper type (specialization only provided for C++20 and newer)
  template <>
  struct CppType<VkMemoryRequirements>
  {
    using Type = MemoryRequirements;
  };
#endif

  // wrapper struct for struct VkMemoryRequirements2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryRequirements2.html
  struct MemoryRequirements2
  {
    // the layout-compatible C API struct this type wraps
    using NativeType = VkMemoryRequirements2;

    // NOTE(review): per vulkan.hpp convention, allowDuplicate presumably controls whether this sType may appear more than once in a StructureChain
    static const bool                                  allowDuplicate = false;
    // the sType value identifying this structure to the Vulkan API
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryRequirements2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; all members are value-initialized by default, pNext_ comes last so it can be omitted
    VULKAN_HPP_CONSTEXPR MemoryRequirements2( MemoryRequirements memoryRequirements_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , memoryRequirements{ memoryRequirements_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryRequirements2( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct; the wrapper is layout-compatible, so reinterpreting the reference is sufficient
    MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryRequirements2( *reinterpret_cast<MemoryRequirements2 const *>( &rhs ) )
    {
    }

    MemoryRequirements2 & operator=( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct (relies on the same layout compatibility as the converting constructor)
    MemoryRequirements2 & operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryRequirements2 const *>( &rhs );
      return *this;
    }

    // implicit conversions to the C struct, by reference and by pointer
    operator VkMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryRequirements2 *>( this );
    }

    operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryRequirements2 *>( this );
    }

    operator VkMemoryRequirements2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryRequirements2 *>( this );
    }

    operator VkMemoryRequirements2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryRequirements2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // returns references to all members as a tuple; used by the reflection-based operator== below
    std::tuple<StructureType const &, void * const &, MemoryRequirements const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memoryRequirements );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // member-wise comparison: defaulted three-way comparison when available, explicit == / != otherwise
    auto operator<=>( MemoryRequirements2 const & ) const = default;
#else
    bool operator==( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements );
#  endif
    }

    bool operator!=( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order must match VkMemoryRequirements2 exactly; the reinterpret_cast conversions above rely on it
    StructureType      sType              = StructureType::eMemoryRequirements2;
    void *             pNext              = {};
    MemoryRequirements memoryRequirements = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type to its C++ wrapper type (specialization only provided for C++20 and newer)
  template <>
  struct CppType<VkMemoryRequirements2>
  {
    using Type = MemoryRequirements2;
  };
#endif

  // maps StructureType::eMemoryRequirements2 to the wrapper type
  template <>
  struct CppType<StructureType, StructureType::eMemoryRequirements2>
  {
    using Type = MemoryRequirements2;
  };

  // NOTE: KHR-suffixed alias kept for backward source compatibility
  using MemoryRequirements2KHR = MemoryRequirements2;

  // wrapper struct for struct VkMemoryType, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryType.html
  struct MemoryType
  {
    // the layout-compatible C API struct this type wraps; note this struct has no sType/pNext
    using NativeType = VkMemoryType;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; all members are value-initialized by default
    VULKAN_HPP_CONSTEXPR MemoryType( MemoryPropertyFlags propertyFlags_ = {}, uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT
      : propertyFlags{ propertyFlags_ }
      , heapIndex{ heapIndex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryType( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct; the wrapper is layout-compatible, so reinterpreting the reference is sufficient
    MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryType( *reinterpret_cast<MemoryType const *>( &rhs ) ) {}

    MemoryType & operator=( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct (relies on the same layout compatibility as the converting constructor)
    MemoryType & operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryType const *>( &rhs );
      return *this;
    }

    // implicit conversions to the C struct, by reference and by pointer
    operator VkMemoryType const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryType *>( this );
    }

    operator VkMemoryType &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryType *>( this );
    }

    operator VkMemoryType const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryType *>( this );
    }

    operator VkMemoryType *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryType *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // returns references to all members as a tuple; used by the reflection-based operator== below
    std::tuple<MemoryPropertyFlags const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( propertyFlags, heapIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // member-wise comparison: defaulted three-way comparison when available, explicit == / != otherwise
    auto operator<=>( MemoryType const & ) const = default;
#else
    bool operator==( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( propertyFlags == rhs.propertyFlags ) && ( heapIndex == rhs.heapIndex );
#  endif
    }

    bool operator!=( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order must match VkMemoryType exactly; the reinterpret_cast conversions above rely on it
    MemoryPropertyFlags propertyFlags = {};
    uint32_t            heapIndex     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type to its C++ wrapper type (specialization only provided for C++20 and newer)
  template <>
  struct CppType<VkMemoryType>
  {
    using Type = MemoryType;
  };
#endif

  // wrapper struct for struct VkMemoryUnmapInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryUnmapInfo.html
  struct MemoryUnmapInfo
  {
    // the layout-compatible C API struct this type wraps
    using NativeType = VkMemoryUnmapInfo;

    // NOTE(review): per vulkan.hpp convention, allowDuplicate presumably controls whether this sType may appear more than once in a StructureChain
    static const bool                                  allowDuplicate = false;
    // the sType value identifying this structure to the Vulkan API
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryUnmapInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; all members are value-initialized by default, pNext_ comes last so it can be omitted
    VULKAN_HPP_CONSTEXPR MemoryUnmapInfo( MemoryUnmapFlags flags_ = {}, DeviceMemory memory_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , memory{ memory_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryUnmapInfo( MemoryUnmapInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct; the wrapper is layout-compatible, so reinterpreting the reference is sufficient
    MemoryUnmapInfo( VkMemoryUnmapInfo const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryUnmapInfo( *reinterpret_cast<MemoryUnmapInfo const *>( &rhs ) ) {}

    MemoryUnmapInfo & operator=( MemoryUnmapInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct (relies on the same layout compatibility as the converting constructor)
    MemoryUnmapInfo & operator=( VkMemoryUnmapInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryUnmapInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfo & setFlags( MemoryUnmapFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfo & setMemory( DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the C struct, by reference and by pointer
    operator VkMemoryUnmapInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryUnmapInfo *>( this );
    }

    operator VkMemoryUnmapInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryUnmapInfo *>( this );
    }

    operator VkMemoryUnmapInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryUnmapInfo *>( this );
    }

    operator VkMemoryUnmapInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryUnmapInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // returns references to all members as a tuple; used by the reflection-based operator== below
    std::tuple<StructureType const &, const void * const &, MemoryUnmapFlags const &, DeviceMemory const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, memory );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // member-wise comparison: defaulted three-way comparison when available, explicit == / != otherwise
    auto operator<=>( MemoryUnmapInfo const & ) const = default;
#else
    bool operator==( MemoryUnmapInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( memory == rhs.memory );
#  endif
    }

    bool operator!=( MemoryUnmapInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order must match VkMemoryUnmapInfo exactly; the reinterpret_cast conversions above rely on it
    StructureType    sType  = StructureType::eMemoryUnmapInfo;
    const void *     pNext  = {};
    MemoryUnmapFlags flags  = {};
    DeviceMemory     memory = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type to its C++ wrapper type (specialization only provided for C++20 and newer)
  template <>
  struct CppType<VkMemoryUnmapInfo>
  {
    using Type = MemoryUnmapInfo;
  };
#endif

  // maps StructureType::eMemoryUnmapInfo to the wrapper type
  template <>
  struct CppType<StructureType, StructureType::eMemoryUnmapInfo>
  {
    using Type = MemoryUnmapInfo;
  };

  // NOTE: KHR-suffixed alias kept for backward source compatibility
  using MemoryUnmapInfoKHR = MemoryUnmapInfo;

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  // wrapper struct for struct VkMemoryWin32HandlePropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryWin32HandlePropertiesKHR.html
  struct MemoryWin32HandlePropertiesKHR
  {
    // the layout-compatible C API struct this type wraps
    using NativeType = VkMemoryWin32HandlePropertiesKHR;

    // NOTE(review): per vulkan.hpp convention, allowDuplicate presumably controls whether this sType may appear more than once in a StructureChain
    static const bool                                  allowDuplicate = false;
    // the sType value identifying this structure to the Vulkan API
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryWin32HandlePropertiesKHR;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; all members are value-initialized by default
    VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , memoryTypeBits{ memoryTypeBits_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct; the wrapper is layout-compatible, so reinterpreting the reference is sufficient
    MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryWin32HandlePropertiesKHR( *reinterpret_cast<MemoryWin32HandlePropertiesKHR const *>( &rhs ) )
    {
    }

    MemoryWin32HandlePropertiesKHR & operator=( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct (relies on the same layout compatibility as the converting constructor)
    MemoryWin32HandlePropertiesKHR & operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryWin32HandlePropertiesKHR const *>( &rhs );
      return *this;
    }

    // implicit conversions to the C struct, by reference and by pointer
    operator VkMemoryWin32HandlePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryWin32HandlePropertiesKHR *>( this );
    }

    operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( this );
    }

    operator VkMemoryWin32HandlePropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryWin32HandlePropertiesKHR *>( this );
    }

    operator VkMemoryWin32HandlePropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // returns references to all members as a tuple; used by the reflection-based operator== below
    std::tuple<StructureType const &, void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memoryTypeBits );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // member-wise comparison: defaulted three-way comparison when available, explicit == / != otherwise
    auto operator<=>( MemoryWin32HandlePropertiesKHR const & ) const = default;
#  else
    bool operator==( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
#    endif
    }

    bool operator!=( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // member order must match VkMemoryWin32HandlePropertiesKHR exactly; the reinterpret_cast conversions above rely on it
    StructureType sType          = StructureType::eMemoryWin32HandlePropertiesKHR;
    void *        pNext          = {};
    uint32_t      memoryTypeBits = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type to its C++ wrapper type (specialization only provided for C++20 and newer)
  template <>
  struct CppType<VkMemoryWin32HandlePropertiesKHR>
  {
    using Type = MemoryWin32HandlePropertiesKHR;
  };
#  endif

  // maps StructureType::eMemoryWin32HandlePropertiesKHR to the wrapper type
  template <>
  struct CppType<StructureType, StructureType::eMemoryWin32HandlePropertiesKHR>
  {
    using Type = MemoryWin32HandlePropertiesKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

#if defined( VK_USE_PLATFORM_FUCHSIA )
  // wrapper struct for struct VkMemoryZirconHandlePropertiesFUCHSIA, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryZirconHandlePropertiesFUCHSIA.html
  struct MemoryZirconHandlePropertiesFUCHSIA
  {
    // the layout-compatible C API struct this type wraps
    using NativeType = VkMemoryZirconHandlePropertiesFUCHSIA;

    // NOTE(review): per vulkan.hpp convention, allowDuplicate presumably controls whether this sType may appear more than once in a StructureChain
    static const bool                                  allowDuplicate = false;
    // the sType value identifying this structure to the Vulkan API
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMemoryZirconHandlePropertiesFUCHSIA;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; all members are value-initialized by default
    VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , memoryTypeBits{ memoryTypeBits_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct; the wrapper is layout-compatible, so reinterpreting the reference is sufficient
    MemoryZirconHandlePropertiesFUCHSIA( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryZirconHandlePropertiesFUCHSIA( *reinterpret_cast<MemoryZirconHandlePropertiesFUCHSIA const *>( &rhs ) )
    {
    }

    MemoryZirconHandlePropertiesFUCHSIA & operator=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct (relies on the same layout compatibility as the converting constructor)
    MemoryZirconHandlePropertiesFUCHSIA & operator=( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MemoryZirconHandlePropertiesFUCHSIA const *>( &rhs );
      return *this;
    }

    // implicit conversions to the C struct, by reference and by pointer
    operator VkMemoryZirconHandlePropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryZirconHandlePropertiesFUCHSIA *>( this );
    }

    operator VkMemoryZirconHandlePropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( this );
    }

    operator VkMemoryZirconHandlePropertiesFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMemoryZirconHandlePropertiesFUCHSIA *>( this );
    }

    operator VkMemoryZirconHandlePropertiesFUCHSIA *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // returns references to all members as a tuple; used by the reflection-based operator== below
    std::tuple<StructureType const &, void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memoryTypeBits );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // member-wise comparison: defaulted three-way comparison when available, explicit == / != otherwise
    auto operator<=>( MemoryZirconHandlePropertiesFUCHSIA const & ) const = default;
#  else
    bool operator==( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
#    endif
    }

    bool operator!=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // member order must match VkMemoryZirconHandlePropertiesFUCHSIA exactly; the reinterpret_cast conversions above rely on it
    StructureType sType          = StructureType::eMemoryZirconHandlePropertiesFUCHSIA;
    void *        pNext          = {};
    uint32_t      memoryTypeBits = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type to its C++ wrapper type (specialization only provided for C++20 and newer)
  template <>
  struct CppType<VkMemoryZirconHandlePropertiesFUCHSIA>
  {
    using Type = MemoryZirconHandlePropertiesFUCHSIA;
  };
#  endif

  // maps StructureType::eMemoryZirconHandlePropertiesFUCHSIA to the wrapper type
  template <>
  struct CppType<StructureType, StructureType::eMemoryZirconHandlePropertiesFUCHSIA>
  {
    using Type = MemoryZirconHandlePropertiesFUCHSIA;
  };
#endif /*VK_USE_PLATFORM_FUCHSIA*/

#if defined( VK_USE_PLATFORM_METAL_EXT )
  // wrapper struct for struct VkMetalSurfaceCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMetalSurfaceCreateInfoEXT.html
  struct MetalSurfaceCreateInfoEXT
  {
    using NativeType = VkMetalSurfaceCreateInfoEXT;

    // This struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // sType value identifying this struct in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMetalSurfaceCreateInfoEXT;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by the member initializer and never settable.
    VULKAN_HPP_CONSTEXPR
      MetalSurfaceCreateInfoEXT( MetalSurfaceCreateFlagsEXT flags_ = {}, const CAMetalLayer * pLayer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , pLayer{ pLayer_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper having the same memory layout as VkMetalSurfaceCreateInfoEXT.
    MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : MetalSurfaceCreateInfoEXT( *reinterpret_cast<MetalSurfaceCreateInfoEXT const *>( &rhs ) )
    {
    }

    MetalSurfaceCreateInfoEXT & operator=( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-aliasing as the converting constructor above).
    MetalSurfaceCreateInfoEXT & operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MetalSurfaceCreateInfoEXT const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setFlags( MetalSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setPLayer( const CAMetalLayer * pLayer_ ) VULKAN_HPP_NOEXCEPT
    {
      pLayer = pLayer_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Bitwise reinterpretation to/from the native type for passing to the C API.
    operator VkMetalSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( this );
    }

    operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMetalSurfaceCreateInfoEXT *>( this );
    }

    operator VkMetalSurfaceCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( this );
    }

    operator VkMetalSurfaceCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMetalSurfaceCreateInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references (used for generic comparison below).
    std::tuple<StructureType const &, const void * const &, MetalSurfaceCreateFlagsEXT const &, const CAMetalLayer * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, pLayer );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MetalSurfaceCreateInfoEXT const & ) const = default;
#  else
    // Member-wise equality; note pNext and pLayer are compared by pointer value, not by pointee.
    bool operator==( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pLayer == rhs.pLayer );
#    endif
    }

    bool operator!=( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    StructureType              sType  = StructureType::eMetalSurfaceCreateInfoEXT;
    const void *               pNext  = {};
    MetalSurfaceCreateFlagsEXT flags  = {};
    const CAMetalLayer *       pLayer = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to this C++ wrapper; only provided for C++20 and newer.
  template <>
  struct CppType<VkMetalSurfaceCreateInfoEXT>
  {
    using Type = MetalSurfaceCreateInfoEXT;
  };
#  endif

  // Trait mapping the StructureType enumerant back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eMetalSurfaceCreateInfoEXT>
  {
    using Type = MetalSurfaceCreateInfoEXT;
  };
#endif /*VK_USE_PLATFORM_METAL_EXT*/

  // wrapper struct for struct VkMicromapBuildInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapBuildInfoEXT.html
  struct MicromapBuildInfoEXT
  {
    using NativeType = VkMicromapBuildInfoEXT;

    // This struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // sType value identifying this struct in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMicromapBuildInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by the member initializer and never settable.
    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT( MicromapTypeEXT                  type_                = MicromapTypeEXT::eOpacityMicromap,
                                                  BuildMicromapFlagsEXT            flags_               = {},
                                                  BuildMicromapModeEXT             mode_                = BuildMicromapModeEXT::eBuild,
                                                  MicromapEXT                      dstMicromap_         = {},
                                                  uint32_t                         usageCountsCount_    = {},
                                                  const MicromapUsageEXT *         pUsageCounts_        = {},
                                                  const MicromapUsageEXT * const * ppUsageCounts_       = {},
                                                  DeviceOrHostAddressConstKHR      data_                = {},
                                                  DeviceOrHostAddressKHR           scratchData_         = {},
                                                  DeviceOrHostAddressConstKHR      triangleArray_       = {},
                                                  DeviceSize                       triangleArrayStride_ = {},
                                                  const void *                     pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , type{ type_ }
      , flags{ flags_ }
      , mode{ mode_ }
      , dstMicromap{ dstMicromap_ }
      , usageCountsCount{ usageCountsCount_ }
      , pUsageCounts{ pUsageCounts_ }
      , ppUsageCounts{ ppUsageCounts_ }
      , data{ data_ }
      , scratchData{ scratchData_ }
      , triangleArray{ triangleArray_ }
      , triangleArrayStride{ triangleArrayStride_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT( MicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper having the same memory layout as VkMicromapBuildInfoEXT.
    MicromapBuildInfoEXT( VkMicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : MicromapBuildInfoEXT( *reinterpret_cast<MicromapBuildInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: takes array proxies for the usage counts. At most one of
    // usageCounts_ (flat array -> pUsageCounts) and pUsageCounts_ (array of pointers -> ppUsageCounts)
    // may be non-empty; usageCountsCount is derived from whichever one is set.
    MicromapBuildInfoEXT( MicromapTypeEXT                                                 type_,
                          BuildMicromapFlagsEXT                                           flags_,
                          BuildMicromapModeEXT                                            mode_,
                          MicromapEXT                                                     dstMicromap_,
                          ArrayProxyNoTemporaries<const MicromapUsageEXT> const &         usageCounts_,
                          ArrayProxyNoTemporaries<const MicromapUsageEXT * const> const & pUsageCounts_        = {},
                          DeviceOrHostAddressConstKHR                                     data_                = {},
                          DeviceOrHostAddressKHR                                          scratchData_         = {},
                          DeviceOrHostAddressConstKHR                                     triangleArray_       = {},
                          DeviceSize                                                      triangleArrayStride_ = {},
                          const void *                                                    pNext_               = nullptr )
      : pNext( pNext_ )
      , type( type_ )
      , flags( flags_ )
      , mode( mode_ )
      , dstMicromap( dstMicromap_ )
      , usageCountsCount( static_cast<uint32_t>( !usageCounts_.empty() ? usageCounts_.size() : pUsageCounts_.size() ) )
      , pUsageCounts( usageCounts_.data() )
      , ppUsageCounts( pUsageCounts_.data() )
      , data( data_ )
      , scratchData( scratchData_ )
      , triangleArray( triangleArray_ )
      , triangleArrayStride( triangleArrayStride_ )
    {
      // Enforce the exclusivity contract: asserts in no-exceptions builds, throws LogicError otherwise.
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( ( !usageCounts_.empty() + !pUsageCounts_.empty() ) <= 1 );
#    else
      if ( 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() ) )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::MicromapBuildInfoEXT::MicromapBuildInfoEXT: 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() )" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    MicromapBuildInfoEXT & operator=( MicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-aliasing as the converting constructor above).
    MicromapBuildInfoEXT & operator=( VkMicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MicromapBuildInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setType( MicromapTypeEXT type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setFlags( BuildMicromapFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setMode( BuildMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT
    {
      mode = mode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setDstMicromap( MicromapEXT dstMicromap_ ) VULKAN_HPP_NOEXCEPT
    {
      dstMicromap = dstMicromap_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setUsageCountsCount( uint32_t usageCountsCount_ ) VULKAN_HPP_NOEXCEPT
    {
      usageCountsCount = usageCountsCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPUsageCounts( const MicromapUsageEXT * pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
    {
      pUsageCounts = pUsageCounts_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience setter: sets pUsageCounts AND usageCountsCount from one array proxy.
    MicromapBuildInfoEXT & setUsageCounts( ArrayProxyNoTemporaries<const MicromapUsageEXT> const & usageCounts_ ) VULKAN_HPP_NOEXCEPT
    {
      usageCountsCount = static_cast<uint32_t>( usageCounts_.size() );
      pUsageCounts     = usageCounts_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPpUsageCounts( const MicromapUsageEXT * const * ppUsageCounts_ ) VULKAN_HPP_NOEXCEPT
    {
      ppUsageCounts = ppUsageCounts_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience setter: sets ppUsageCounts AND usageCountsCount from one array proxy of pointers.
    MicromapBuildInfoEXT & setPUsageCounts( ArrayProxyNoTemporaries<const MicromapUsageEXT * const> const & pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
    {
      usageCountsCount = static_cast<uint32_t>( pUsageCounts_.size() );
      ppUsageCounts    = pUsageCounts_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setData( DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT
    {
      data = data_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setScratchData( DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT
    {
      scratchData = scratchData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setTriangleArray( DeviceOrHostAddressConstKHR const & triangleArray_ ) VULKAN_HPP_NOEXCEPT
    {
      triangleArray = triangleArray_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setTriangleArrayStride( DeviceSize triangleArrayStride_ ) VULKAN_HPP_NOEXCEPT
    {
      triangleArrayStride = triangleArrayStride_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Bitwise reinterpretation to/from the native type for passing to the C API.
    operator VkMicromapBuildInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMicromapBuildInfoEXT *>( this );
    }

    operator VkMicromapBuildInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMicromapBuildInfoEXT *>( this );
    }

    operator VkMicromapBuildInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMicromapBuildInfoEXT *>( this );
    }

    operator VkMicromapBuildInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMicromapBuildInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references.
    std::tuple<StructureType const &,
               const void * const &,
               MicromapTypeEXT const &,
               BuildMicromapFlagsEXT const &,
               BuildMicromapModeEXT const &,
               MicromapEXT const &,
               uint32_t const &,
               const MicromapUsageEXT * const &,
               const MicromapUsageEXT * const * const &,
               DeviceOrHostAddressConstKHR const &,
               DeviceOrHostAddressKHR const &,
               DeviceOrHostAddressConstKHR const &,
               DeviceSize const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(
        sType, pNext, type, flags, mode, dstMicromap, usageCountsCount, pUsageCounts, ppUsageCounts, data, scratchData, triangleArray, triangleArrayStride );
    }
#endif

    // NOTE(review): unlike sibling structs, no comparison operators are generated here —
    // presumably because of the DeviceOrHostAddress* union members; confirm against the generator.
  public:
    StructureType                    sType               = StructureType::eMicromapBuildInfoEXT;
    const void *                     pNext               = {};
    MicromapTypeEXT                  type                = MicromapTypeEXT::eOpacityMicromap;
    BuildMicromapFlagsEXT            flags               = {};
    BuildMicromapModeEXT             mode                = BuildMicromapModeEXT::eBuild;
    MicromapEXT                      dstMicromap         = {};
    uint32_t                         usageCountsCount    = {};
    const MicromapUsageEXT *         pUsageCounts        = {};
    const MicromapUsageEXT * const * ppUsageCounts       = {};
    DeviceOrHostAddressConstKHR      data                = {};
    DeviceOrHostAddressKHR           scratchData         = {};
    DeviceOrHostAddressConstKHR      triangleArray       = {};
    DeviceSize                       triangleArrayStride = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to this C++ wrapper; only provided for C++20 and newer.
  template <>
  struct CppType<VkMicromapBuildInfoEXT>
  {
    using Type = MicromapBuildInfoEXT;
  };
#endif

  // Trait mapping the StructureType enumerant back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eMicromapBuildInfoEXT>
  {
    using Type = MicromapBuildInfoEXT;
  };

  // wrapper struct for struct VkMicromapBuildSizesInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapBuildSizesInfoEXT.html
  struct MicromapBuildSizesInfoEXT
  {
    using NativeType = VkMicromapBuildSizesInfoEXT;

    // This struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // sType value identifying this struct in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMicromapBuildSizesInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by the member initializer and never settable.
    VULKAN_HPP_CONSTEXPR MicromapBuildSizesInfoEXT( DeviceSize   micromapSize_     = {},
                                                    DeviceSize   buildScratchSize_ = {},
                                                    Bool32       discardable_      = {},
                                                    const void * pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , micromapSize{ micromapSize_ }
      , buildScratchSize{ buildScratchSize_ }
      , discardable{ discardable_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MicromapBuildSizesInfoEXT( MicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper having the same memory layout as VkMicromapBuildSizesInfoEXT.
    MicromapBuildSizesInfoEXT( VkMicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : MicromapBuildSizesInfoEXT( *reinterpret_cast<MicromapBuildSizesInfoEXT const *>( &rhs ) )
    {
    }

    MicromapBuildSizesInfoEXT & operator=( MicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-aliasing as the converting constructor above).
    MicromapBuildSizesInfoEXT & operator=( VkMicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MicromapBuildSizesInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setMicromapSize( DeviceSize micromapSize_ ) VULKAN_HPP_NOEXCEPT
    {
      micromapSize = micromapSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setBuildScratchSize( DeviceSize buildScratchSize_ ) VULKAN_HPP_NOEXCEPT
    {
      buildScratchSize = buildScratchSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setDiscardable( Bool32 discardable_ ) VULKAN_HPP_NOEXCEPT
    {
      discardable = discardable_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Bitwise reinterpretation to/from the native type for passing to the C API.
    operator VkMicromapBuildSizesInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMicromapBuildSizesInfoEXT *>( this );
    }

    operator VkMicromapBuildSizesInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( this );
    }

    operator VkMicromapBuildSizesInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMicromapBuildSizesInfoEXT *>( this );
    }

    operator VkMicromapBuildSizesInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references (used for generic comparison below).
    std::tuple<StructureType const &, const void * const &, DeviceSize const &, DeviceSize const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, micromapSize, buildScratchSize, discardable );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MicromapBuildSizesInfoEXT const & ) const = default;
#else
    // Member-wise equality; note pNext is compared by pointer value, not by pointee.
    bool operator==( MicromapBuildSizesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( micromapSize == rhs.micromapSize ) && ( buildScratchSize == rhs.buildScratchSize ) &&
             ( discardable == rhs.discardable );
#  endif
    }

    bool operator!=( MicromapBuildSizesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType            = StructureType::eMicromapBuildSizesInfoEXT;
    const void *  pNext            = {};
    DeviceSize    micromapSize     = {};
    DeviceSize    buildScratchSize = {};
    Bool32        discardable      = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to this C++ wrapper; only provided for C++20 and newer.
  template <>
  struct CppType<VkMicromapBuildSizesInfoEXT>
  {
    using Type = MicromapBuildSizesInfoEXT;
  };
#endif

  // Trait mapping the StructureType enumerant back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eMicromapBuildSizesInfoEXT>
  {
    using Type = MicromapBuildSizesInfoEXT;
  };

  // wrapper struct for struct VkMicromapCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapCreateInfoEXT.html
  struct MicromapCreateInfoEXT
  {
    using NativeType = VkMicromapCreateInfoEXT;

    // This struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // sType value identifying this struct in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMicromapCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by the member initializer and never settable.
    VULKAN_HPP_CONSTEXPR MicromapCreateInfoEXT( MicromapCreateFlagsEXT createFlags_   = {},
                                                Buffer                 buffer_        = {},
                                                DeviceSize             offset_        = {},
                                                DeviceSize             size_          = {},
                                                MicromapTypeEXT        type_          = MicromapTypeEXT::eOpacityMicromap,
                                                DeviceAddress          deviceAddress_ = {},
                                                const void *           pNext_         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , createFlags{ createFlags_ }
      , buffer{ buffer_ }
      , offset{ offset_ }
      , size{ size_ }
      , type{ type_ }
      , deviceAddress{ deviceAddress_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MicromapCreateInfoEXT( MicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper having the same memory layout as VkMicromapCreateInfoEXT.
    MicromapCreateInfoEXT( VkMicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : MicromapCreateInfoEXT( *reinterpret_cast<MicromapCreateInfoEXT const *>( &rhs ) )
    {
    }

    MicromapCreateInfoEXT & operator=( MicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-aliasing as the converting constructor above).
    MicromapCreateInfoEXT & operator=( VkMicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MicromapCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setCreateFlags( MicromapCreateFlagsEXT createFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      createFlags = createFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setBuffer( Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setOffset( DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setSize( DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setType( MicromapTypeEXT type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setDeviceAddress( DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceAddress = deviceAddress_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Bitwise reinterpretation to/from the native type for passing to the C API.
    operator VkMicromapCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMicromapCreateInfoEXT *>( this );
    }

    operator VkMicromapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMicromapCreateInfoEXT *>( this );
    }

    operator VkMicromapCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMicromapCreateInfoEXT *>( this );
    }

    operator VkMicromapCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMicromapCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references (used for generic comparison below).
    std::tuple<StructureType const &,
               const void * const &,
               MicromapCreateFlagsEXT const &,
               Buffer const &,
               DeviceSize const &,
               DeviceSize const &,
               MicromapTypeEXT const &,
               DeviceAddress const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, createFlags, buffer, offset, size, type, deviceAddress );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MicromapCreateInfoEXT const & ) const = default;
#else
    // Member-wise equality; note pNext is compared by pointer value, not by pointee.
    bool operator==( MicromapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createFlags == rhs.createFlags ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) &&
             ( size == rhs.size ) && ( type == rhs.type ) && ( deviceAddress == rhs.deviceAddress );
#  endif
    }

    bool operator!=( MicromapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType          sType         = StructureType::eMicromapCreateInfoEXT;
    const void *           pNext         = {};
    MicromapCreateFlagsEXT createFlags   = {};
    Buffer                 buffer        = {};
    DeviceSize             offset        = {};
    DeviceSize             size          = {};
    MicromapTypeEXT        type          = MicromapTypeEXT::eOpacityMicromap;
    DeviceAddress          deviceAddress = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to this C++ wrapper; only provided for C++20 and newer.
  template <>
  struct CppType<VkMicromapCreateInfoEXT>
  {
    using Type = MicromapCreateInfoEXT;
  };
#endif

  // Trait mapping the StructureType enumerant back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eMicromapCreateInfoEXT>
  {
    using Type = MicromapCreateInfoEXT;
  };

  // wrapper struct for struct VkMicromapTriangleEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapTriangleEXT.html
  struct MicromapTriangleEXT
  {
    using NativeType = VkMicromapTriangleEXT;

    // Plain data struct (no sType/pNext): it is not part of a pNext chain.
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor. format is a raw uint16_t here — presumably a
    // VkOpacityMicromapFormatEXT value; confirm against the Vulkan registry.
    VULKAN_HPP_CONSTEXPR MicromapTriangleEXT( uint32_t dataOffset_ = {}, uint16_t subdivisionLevel_ = {}, uint16_t format_ = {} ) VULKAN_HPP_NOEXCEPT
      : dataOffset{ dataOffset_ }
      , subdivisionLevel{ subdivisionLevel_ }
      , format{ format_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MicromapTriangleEXT( MicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper having the same memory layout as VkMicromapTriangleEXT.
    MicromapTriangleEXT( VkMicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MicromapTriangleEXT( *reinterpret_cast<MicromapTriangleEXT const *>( &rhs ) )
    {
    }

    MicromapTriangleEXT & operator=( MicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-aliasing as the converting constructor above).
    MicromapTriangleEXT & operator=( VkMicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MicromapTriangleEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setDataOffset( uint32_t dataOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      dataOffset = dataOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setSubdivisionLevel( uint16_t subdivisionLevel_ ) VULKAN_HPP_NOEXCEPT
    {
      subdivisionLevel = subdivisionLevel_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setFormat( uint16_t format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Bitwise reinterpretation to/from the native type for passing to the C API.
    operator VkMicromapTriangleEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMicromapTriangleEXT *>( this );
    }

    operator VkMicromapTriangleEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMicromapTriangleEXT *>( this );
    }

    operator VkMicromapTriangleEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMicromapTriangleEXT *>( this );
    }

    operator VkMicromapTriangleEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMicromapTriangleEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references (used for generic comparison below).
    std::tuple<uint32_t const &, uint16_t const &, uint16_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( dataOffset, subdivisionLevel, format );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MicromapTriangleEXT const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( MicromapTriangleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( dataOffset == rhs.dataOffset ) && ( subdivisionLevel == rhs.subdivisionLevel ) && ( format == rhs.format );
#  endif
    }

    bool operator!=( MicromapTriangleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t dataOffset       = {};
    uint16_t subdivisionLevel = {};
    uint16_t format           = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to this C++ wrapper; only provided for C++20 and newer.
  // (No StructureType specialization: MicromapTriangleEXT has no sType member.)
  template <>
  struct CppType<VkMicromapTriangleEXT>
  {
    using Type = MicromapTriangleEXT;
  };
#endif

  // wrapper struct for struct VkMicromapVersionInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapVersionInfoEXT.html
  struct MicromapVersionInfoEXT
  {
    using NativeType = VkMicromapVersionInfoEXT;

    // This struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // sType value identifying this struct in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMicromapVersionInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by the member initializer and never settable.
    VULKAN_HPP_CONSTEXPR MicromapVersionInfoEXT( const uint8_t * pVersionData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pVersionData{ pVersionData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MicromapVersionInfoEXT( MicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper having the same memory layout as VkMicromapVersionInfoEXT.
    MicromapVersionInfoEXT( VkMicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : MicromapVersionInfoEXT( *reinterpret_cast<MicromapVersionInfoEXT const *>( &rhs ) )
    {
    }

    MicromapVersionInfoEXT & operator=( MicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-aliasing as the converting constructor above).
    MicromapVersionInfoEXT & operator=( VkMicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MicromapVersionInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 MicromapVersionInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MicromapVersionInfoEXT & setPVersionData( const uint8_t * pVersionData_ ) VULKAN_HPP_NOEXCEPT
    {
      pVersionData = pVersionData_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Bitwise reinterpretation to/from the native type for passing to the C API.
    operator VkMicromapVersionInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMicromapVersionInfoEXT *>( this );
    }

    operator VkMicromapVersionInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMicromapVersionInfoEXT *>( this );
    }

    operator VkMicromapVersionInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMicromapVersionInfoEXT *>( this );
    }

    operator VkMicromapVersionInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMicromapVersionInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references (used for generic comparison below).
    std::tuple<StructureType const &, const void * const &, const uint8_t * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pVersionData );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MicromapVersionInfoEXT const & ) const = default;
#else
    // Member-wise equality; note pNext and pVersionData are compared by pointer value, not by pointee.
    bool operator==( MicromapVersionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pVersionData == rhs.pVersionData );
#  endif
    }

    bool operator!=( MicromapVersionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType   sType        = StructureType::eMicromapVersionInfoEXT;
    const void *    pNext        = {};
    const uint8_t * pVersionData = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for generic code (C++20 and later only).
  template <>
  struct CppType<VkMicromapVersionInfoEXT>
  {
    using Type = MicromapVersionInfoEXT;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::eMicromapVersionInfoEXT>
  {
    using Type = MicromapVersionInfoEXT;
  };

  // wrapper struct for struct VkMultiDrawIndexedInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMultiDrawIndexedInfoEXT.html
  // Plain data struct (no sType/pNext): one entry of the array consumed by vkCmdDrawMultiIndexedEXT.
  struct MultiDrawIndexedInfoEXT
  {
    using NativeType = VkMultiDrawIndexedInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all members default to zero.
    VULKAN_HPP_CONSTEXPR MultiDrawIndexedInfoEXT( uint32_t firstIndex_ = {}, uint32_t indexCount_ = {}, int32_t vertexOffset_ = {} ) VULKAN_HPP_NOEXCEPT
      : firstIndex{ firstIndex_ }
      , indexCount{ indexCount_ }
      , vertexOffset{ vertexOffset_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MultiDrawIndexedInfoEXT( MultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because this wrapper is layout-identical to VkMultiDrawIndexedInfoEXT.
    MultiDrawIndexedInfoEXT( VkMultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : MultiDrawIndexedInfoEXT( *reinterpret_cast<MultiDrawIndexedInfoEXT const *>( &rhs ) )
    {
    }

    MultiDrawIndexedInfoEXT & operator=( MultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the layout-compatible wrapper view.
    MultiDrawIndexedInfoEXT & operator=( VkMultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MultiDrawIndexedInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this for fluent composition.
    VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      firstIndex = firstIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      indexCount = indexCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexOffset = vertexOffset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-copy conversions to the native C type (reinterpret this object in place).
    operator VkMultiDrawIndexedInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( this );
    }

    operator VkMultiDrawIndexedInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMultiDrawIndexedInfoEXT *>( this );
    }

    operator VkMultiDrawIndexedInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( this );
    }

    operator VkMultiDrawIndexedInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMultiDrawIndexedInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references, in declaration order.
    std::tuple<uint32_t const &, uint32_t const &, int32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( firstIndex, indexCount, vertexOffset );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MultiDrawIndexedInfoEXT const & ) const = default;
#else
    // Pre-C++20 fallback: member-wise equality.
    bool operator==( MultiDrawIndexedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( firstIndex == rhs.firstIndex ) && ( indexCount == rhs.indexCount ) && ( vertexOffset == rhs.vertexOffset );
#  endif
    }

    bool operator!=( MultiDrawIndexedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkMultiDrawIndexedInfoEXT exactly; do not reorder.
    uint32_t firstIndex   = {};
    uint32_t indexCount   = {};
    int32_t  vertexOffset = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for generic code (C++20 and later only).
  template <>
  struct CppType<VkMultiDrawIndexedInfoEXT>
  {
    using Type = MultiDrawIndexedInfoEXT;
  };
#endif

  // wrapper struct for struct VkMultiDrawInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMultiDrawInfoEXT.html
  // Plain data struct (no sType/pNext): one entry of the array consumed by vkCmdDrawMultiEXT.
  struct MultiDrawInfoEXT
  {
    using NativeType = VkMultiDrawInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all members default to zero.
    VULKAN_HPP_CONSTEXPR MultiDrawInfoEXT( uint32_t firstVertex_ = {}, uint32_t vertexCount_ = {} ) VULKAN_HPP_NOEXCEPT
      : firstVertex{ firstVertex_ }
      , vertexCount{ vertexCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MultiDrawInfoEXT( MultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because this wrapper is layout-identical to VkMultiDrawInfoEXT.
    MultiDrawInfoEXT( VkMultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MultiDrawInfoEXT( *reinterpret_cast<MultiDrawInfoEXT const *>( &rhs ) ) {}

    MultiDrawInfoEXT & operator=( MultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the layout-compatible wrapper view.
    MultiDrawInfoEXT & operator=( VkMultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MultiDrawInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this for fluent composition.
    VULKAN_HPP_CONSTEXPR_14 MultiDrawInfoEXT & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
    {
      firstVertex = firstVertex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MultiDrawInfoEXT & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexCount = vertexCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-copy conversions to the native C type (reinterpret this object in place).
    operator VkMultiDrawInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMultiDrawInfoEXT *>( this );
    }

    operator VkMultiDrawInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMultiDrawInfoEXT *>( this );
    }

    operator VkMultiDrawInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMultiDrawInfoEXT *>( this );
    }

    operator VkMultiDrawInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMultiDrawInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references, in declaration order.
    std::tuple<uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( firstVertex, vertexCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MultiDrawInfoEXT const & ) const = default;
#else
    // Pre-C++20 fallback: member-wise equality.
    bool operator==( MultiDrawInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( firstVertex == rhs.firstVertex ) && ( vertexCount == rhs.vertexCount );
#  endif
    }

    bool operator!=( MultiDrawInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkMultiDrawInfoEXT exactly; do not reorder.
    uint32_t firstVertex = {};
    uint32_t vertexCount = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for generic code (C++20 and later only).
  template <>
  struct CppType<VkMultiDrawInfoEXT>
  {
    using Type = MultiDrawInfoEXT;
  };
#endif

  // wrapper struct for struct VkMultisamplePropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMultisamplePropertiesEXT.html
  // Output-only struct (filled by the implementation), hence no setter section is generated.
  struct MultisamplePropertiesEXT
  {
    using NativeType = VkMultisamplePropertiesEXT;

    // Used by structure-chain validation: this sType may appear at most once in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMultisamplePropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer.
    VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( Extent2D maxSampleLocationGridSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxSampleLocationGridSize{ maxSampleLocationGridSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because this wrapper is layout-identical to VkMultisamplePropertiesEXT.
    MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : MultisamplePropertiesEXT( *reinterpret_cast<MultisamplePropertiesEXT const *>( &rhs ) )
    {
    }

    MultisamplePropertiesEXT & operator=( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the layout-compatible wrapper view.
    MultisamplePropertiesEXT & operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MultisamplePropertiesEXT const *>( &rhs );
      return *this;
    }

    // Zero-copy conversions to the native C type (reinterpret this object in place).
    operator VkMultisamplePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMultisamplePropertiesEXT *>( this );
    }

    operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMultisamplePropertiesEXT *>( this );
    }

    operator VkMultisamplePropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMultisamplePropertiesEXT *>( this );
    }

    operator VkMultisamplePropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMultisamplePropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &, void * const &, Extent2D const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxSampleLocationGridSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MultisamplePropertiesEXT const & ) const = default;
#else
    // Pre-C++20 fallback: shallow member-wise comparison (pNext compared by address).
    bool operator==( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize );
#  endif
    }

    bool operator!=( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkMultisamplePropertiesEXT exactly; do not reorder.
    StructureType sType                     = StructureType::eMultisamplePropertiesEXT;
    void *        pNext                     = {};
    Extent2D      maxSampleLocationGridSize = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for generic code (C++20 and later only).
  template <>
  struct CppType<VkMultisamplePropertiesEXT>
  {
    using Type = MultisamplePropertiesEXT;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::eMultisamplePropertiesEXT>
  {
    using Type = MultisamplePropertiesEXT;
  };

  // wrapper struct for struct VkMultisampledRenderToSingleSampledInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMultisampledRenderToSingleSampledInfoEXT.html
  struct MultisampledRenderToSingleSampledInfoEXT
  {
    using NativeType = VkMultisampledRenderToSingleSampledInfoEXT;

    // Used by structure-chain validation: this sType may appear at most once in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMultisampledRenderToSingleSampledInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; rasterizationSamples defaults to 1 sample, matching the member initializer.
    VULKAN_HPP_CONSTEXPR MultisampledRenderToSingleSampledInfoEXT( Bool32              multisampledRenderToSingleSampledEnable_ = {},
                                                                   SampleCountFlagBits rasterizationSamples_                    = SampleCountFlagBits::e1,
                                                                   const void *        pNext_                                   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , multisampledRenderToSingleSampledEnable{ multisampledRenderToSingleSampledEnable_ }
      , rasterizationSamples{ rasterizationSamples_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MultisampledRenderToSingleSampledInfoEXT( MultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because this wrapper is layout-identical to the native type.
    MultisampledRenderToSingleSampledInfoEXT( VkMultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : MultisampledRenderToSingleSampledInfoEXT( *reinterpret_cast<MultisampledRenderToSingleSampledInfoEXT const *>( &rhs ) )
    {
    }

    MultisampledRenderToSingleSampledInfoEXT & operator=( MultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the layout-compatible wrapper view.
    MultisampledRenderToSingleSampledInfoEXT & operator=( VkMultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MultisampledRenderToSingleSampledInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this for fluent composition.
    VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT &
      setMultisampledRenderToSingleSampledEnable( Bool32 multisampledRenderToSingleSampledEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      multisampledRenderToSingleSampledEnable = multisampledRenderToSingleSampledEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT & setRasterizationSamples( SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
    {
      rasterizationSamples = rasterizationSamples_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-copy conversions to the native C type (reinterpret this object in place).
    operator VkMultisampledRenderToSingleSampledInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMultisampledRenderToSingleSampledInfoEXT *>( this );
    }

    operator VkMultisampledRenderToSingleSampledInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMultisampledRenderToSingleSampledInfoEXT *>( this );
    }

    operator VkMultisampledRenderToSingleSampledInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMultisampledRenderToSingleSampledInfoEXT *>( this );
    }

    operator VkMultisampledRenderToSingleSampledInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMultisampledRenderToSingleSampledInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &, const void * const &, Bool32 const &, SampleCountFlagBits const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, multisampledRenderToSingleSampledEnable, rasterizationSamples );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MultisampledRenderToSingleSampledInfoEXT const & ) const = default;
#else
    // Pre-C++20 fallback: shallow member-wise comparison (pNext compared by address).
    bool operator==( MultisampledRenderToSingleSampledInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multisampledRenderToSingleSampledEnable == rhs.multisampledRenderToSingleSampledEnable ) &&
             ( rasterizationSamples == rhs.rasterizationSamples );
#  endif
    }

    bool operator!=( MultisampledRenderToSingleSampledInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror the native C struct exactly; do not reorder.
    StructureType       sType                                   = StructureType::eMultisampledRenderToSingleSampledInfoEXT;
    const void *        pNext                                   = {};
    Bool32              multisampledRenderToSingleSampledEnable = {};
    SampleCountFlagBits rasterizationSamples                    = SampleCountFlagBits::e1;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for generic code (C++20 and later only).
  template <>
  struct CppType<VkMultisampledRenderToSingleSampledInfoEXT>
  {
    using Type = MultisampledRenderToSingleSampledInfoEXT;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::eMultisampledRenderToSingleSampledInfoEXT>
  {
    using Type = MultisampledRenderToSingleSampledInfoEXT;
  };

  // wrapper struct for struct VkMultiviewPerViewAttributesInfoNVX, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMultiviewPerViewAttributesInfoNVX.html
  struct MultiviewPerViewAttributesInfoNVX
  {
    using NativeType = VkMultiviewPerViewAttributesInfoNVX;

    // Used by structure-chain validation: this sType may appear at most once in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMultiviewPerViewAttributesInfoNVX;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer.
    VULKAN_HPP_CONSTEXPR MultiviewPerViewAttributesInfoNVX( Bool32       perViewAttributes_              = {},
                                                            Bool32       perViewAttributesPositionXOnly_ = {},
                                                            const void * pNext_                          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , perViewAttributes{ perViewAttributes_ }
      , perViewAttributesPositionXOnly{ perViewAttributesPositionXOnly_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MultiviewPerViewAttributesInfoNVX( MultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because this wrapper is layout-identical to the native type.
    MultiviewPerViewAttributesInfoNVX( VkMultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
      : MultiviewPerViewAttributesInfoNVX( *reinterpret_cast<MultiviewPerViewAttributesInfoNVX const *>( &rhs ) )
    {
    }

    MultiviewPerViewAttributesInfoNVX & operator=( MultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the layout-compatible wrapper view.
    MultiviewPerViewAttributesInfoNVX & operator=( VkMultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MultiviewPerViewAttributesInfoNVX const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this for fluent composition.
    VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPerViewAttributes( Bool32 perViewAttributes_ ) VULKAN_HPP_NOEXCEPT
    {
      perViewAttributes = perViewAttributes_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPerViewAttributesPositionXOnly( Bool32 perViewAttributesPositionXOnly_ ) VULKAN_HPP_NOEXCEPT
    {
      perViewAttributesPositionXOnly = perViewAttributesPositionXOnly_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-copy conversions to the native C type (reinterpret this object in place).
    operator VkMultiviewPerViewAttributesInfoNVX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMultiviewPerViewAttributesInfoNVX *>( this );
    }

    operator VkMultiviewPerViewAttributesInfoNVX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMultiviewPerViewAttributesInfoNVX *>( this );
    }

    operator VkMultiviewPerViewAttributesInfoNVX const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMultiviewPerViewAttributesInfoNVX *>( this );
    }

    operator VkMultiviewPerViewAttributesInfoNVX *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMultiviewPerViewAttributesInfoNVX *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &, const void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, perViewAttributes, perViewAttributesPositionXOnly );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MultiviewPerViewAttributesInfoNVX const & ) const = default;
#else
    // Pre-C++20 fallback: shallow member-wise comparison (pNext compared by address).
    bool operator==( MultiviewPerViewAttributesInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( perViewAttributes == rhs.perViewAttributes ) &&
             ( perViewAttributesPositionXOnly == rhs.perViewAttributesPositionXOnly );
#  endif
    }

    bool operator!=( MultiviewPerViewAttributesInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkMultiviewPerViewAttributesInfoNVX exactly; do not reorder.
    StructureType sType                          = StructureType::eMultiviewPerViewAttributesInfoNVX;
    const void *  pNext                          = {};
    Bool32        perViewAttributes              = {};
    Bool32        perViewAttributesPositionXOnly = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for generic code (C++20 and later only).
  template <>
  struct CppType<VkMultiviewPerViewAttributesInfoNVX>
  {
    using Type = MultiviewPerViewAttributesInfoNVX;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::eMultiviewPerViewAttributesInfoNVX>
  {
    using Type = MultiviewPerViewAttributesInfoNVX;
  };

  // wrapper struct for struct VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM.html
  struct MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM
  {
    using NativeType = VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM;

    // Used by structure-chain validation: this sType may appear at most once in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; caller is responsible for keeping perViewRenderAreaCount and
    // pPerViewRenderAreas consistent. The pointed-to array is not owned.
    VULKAN_HPP_CONSTEXPR MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( uint32_t       perViewRenderAreaCount_ = {},
                                                                             const Rect2D * pPerViewRenderAreas_    = {},
                                                                             const void *   pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , perViewRenderAreaCount{ perViewRenderAreaCount_ }
      , pPerViewRenderAreas{ pPerViewRenderAreas_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because this wrapper is layout-identical to the native type.
    MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( *reinterpret_cast<MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives count and pointer from one array proxy, so the
    // two members cannot get out of sync. The proxy must outlive any use of this struct.
    MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( ArrayProxyNoTemporaries<const Rect2D> const & perViewRenderAreas_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), perViewRenderAreaCount( static_cast<uint32_t>( perViewRenderAreas_.size() ) ), pPerViewRenderAreas( perViewRenderAreas_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM &
      operator=( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the layout-compatible wrapper view.
    MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & operator=( VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this for fluent composition.
    VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM &
      setPerViewRenderAreaCount( uint32_t perViewRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT
    {
      perViewRenderAreaCount = perViewRenderAreaCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM &
      setPPerViewRenderAreas( const Rect2D * pPerViewRenderAreas_ ) VULKAN_HPP_NOEXCEPT
    {
      pPerViewRenderAreas = pPerViewRenderAreas_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: updates count and pointer together from one array proxy.
    MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM &
      setPerViewRenderAreas( ArrayProxyNoTemporaries<const Rect2D> const & perViewRenderAreas_ ) VULKAN_HPP_NOEXCEPT
    {
      perViewRenderAreaCount = static_cast<uint32_t>( perViewRenderAreas_.size() );
      pPerViewRenderAreas    = perViewRenderAreas_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Zero-copy conversions to the native C type (reinterpret this object in place).
    operator VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM *>( this );
    }

    operator VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM *>( this );
    }

    operator VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM *>( this );
    }

    operator VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const Rect2D * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, perViewRenderAreaCount, pPerViewRenderAreas );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & ) const = default;
#else
    // Pre-C++20 fallback: shallow member-wise comparison (array pointer compared by address, not contents).
    bool operator==( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( perViewRenderAreaCount == rhs.perViewRenderAreaCount ) &&
             ( pPerViewRenderAreas == rhs.pPerViewRenderAreas );
#  endif
    }

    bool operator!=( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror the native C struct exactly; do not reorder.
    StructureType  sType                  = StructureType::eMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM;
    const void *   pNext                  = {};
    uint32_t       perViewRenderAreaCount = {};
    const Rect2D * pPerViewRenderAreas    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for generic code (C++20 and later only).
  template <>
  struct CppType<VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM>
  {
    using Type = MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::eMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM>
  {
    using Type = MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM;
  };

  // wrapper struct for struct VkMutableDescriptorTypeListEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMutableDescriptorTypeListEXT.html
  struct MutableDescriptorTypeListEXT
  {
    using NativeType = VkMutableDescriptorTypeListEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListEXT( uint32_t descriptorTypeCount_ = {}, const DescriptorType * pDescriptorTypes_ = {} ) VULKAN_HPP_NOEXCEPT
      : descriptorTypeCount{ descriptorTypeCount_ }
      , pDescriptorTypes{ pDescriptorTypes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListEXT( MutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MutableDescriptorTypeListEXT( VkMutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : MutableDescriptorTypeListEXT( *reinterpret_cast<MutableDescriptorTypeListEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    MutableDescriptorTypeListEXT( ArrayProxyNoTemporaries<const DescriptorType> const & descriptorTypes_ )
      : descriptorTypeCount( static_cast<uint32_t>( descriptorTypes_.size() ) ), pDescriptorTypes( descriptorTypes_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    MutableDescriptorTypeListEXT & operator=( MutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    MutableDescriptorTypeListEXT & operator=( VkMutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MutableDescriptorTypeListEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListEXT & setDescriptorTypeCount( uint32_t descriptorTypeCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorTypeCount = descriptorTypeCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListEXT & setPDescriptorTypes( const DescriptorType * pDescriptorTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      pDescriptorTypes = pDescriptorTypes_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    MutableDescriptorTypeListEXT & setDescriptorTypes( ArrayProxyNoTemporaries<const DescriptorType> const & descriptorTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorTypeCount = static_cast<uint32_t>( descriptorTypes_.size() );
      pDescriptorTypes    = descriptorTypes_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    operator VkMutableDescriptorTypeListEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMutableDescriptorTypeListEXT *>( this );
    }

    operator VkMutableDescriptorTypeListEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMutableDescriptorTypeListEXT *>( this );
    }

    operator VkMutableDescriptorTypeListEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMutableDescriptorTypeListEXT *>( this );
    }

    operator VkMutableDescriptorTypeListEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMutableDescriptorTypeListEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<uint32_t const &, const DescriptorType * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( descriptorTypeCount, pDescriptorTypes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MutableDescriptorTypeListEXT const & ) const = default;
#else
    // Member-wise equality; note pDescriptorTypes is compared by pointer value, not by the
    // array contents it points to.
    bool operator==( MutableDescriptorTypeListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( descriptorTypeCount == rhs.descriptorTypeCount ) && ( pDescriptorTypes == rhs.pDescriptorTypes );
#  endif
    }

    bool operator!=( MutableDescriptorTypeListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Mirrors VkMutableDescriptorTypeListEXT: element count plus a non-owning pointer.
    uint32_t               descriptorTypeCount = {};
    const DescriptorType * pDescriptorTypes    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan type to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkMutableDescriptorTypeListEXT>
  {
    using Type = MutableDescriptorTypeListEXT;
  };
#endif
  // Alias preserving the original VALVE-suffixed name of this struct.
  using MutableDescriptorTypeListVALVE = MutableDescriptorTypeListEXT;

  // wrapper struct for struct VkMutableDescriptorTypeCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMutableDescriptorTypeCreateInfoEXT.html
  struct MutableDescriptorTypeCreateInfoEXT
  {
    using NativeType = VkMutableDescriptorTypeCreateInfoEXT;

    // May appear at most once in a pNext chain; carries the sType value below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eMutableDescriptorTypeCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by the member initializer of the struct.
    VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoEXT( uint32_t                             mutableDescriptorTypeListCount_ = {},
                                                             const MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists_    = {},
                                                             const void *                         pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , mutableDescriptorTypeListCount{ mutableDescriptorTypeListCount_ }
      , pMutableDescriptorTypeLists{ pMutableDescriptorTypeLists_ }
    {
    }

    VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoEXT( MutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on the wrapper mirroring the C layout.
    MutableDescriptorTypeCreateInfoEXT( VkMutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : MutableDescriptorTypeCreateInfoEXT( *reinterpret_cast<MutableDescriptorTypeCreateInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor deriving the count from the proxy. Only a pointer is stored,
    // so the referenced array must outlive this struct.
    MutableDescriptorTypeCreateInfoEXT( ArrayProxyNoTemporaries<const MutableDescriptorTypeListEXT> const & mutableDescriptorTypeLists_,
                                        const void *                                                        pNext_ = nullptr )
      : pNext( pNext_ )
      , mutableDescriptorTypeListCount( static_cast<uint32_t>( mutableDescriptorTypeLists_.size() ) )
      , pMutableDescriptorTypeLists( mutableDescriptorTypeLists_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    MutableDescriptorTypeCreateInfoEXT & operator=( MutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (reinterpreted in place, no conversion work).
    MutableDescriptorTypeCreateInfoEXT & operator=( VkMutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<MutableDescriptorTypeCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member (sType is not settable).
    VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT &
      setMutableDescriptorTypeListCount( uint32_t mutableDescriptorTypeListCount_ ) VULKAN_HPP_NOEXCEPT
    {
      mutableDescriptorTypeListCount = mutableDescriptorTypeListCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT &
      setPMutableDescriptorTypeLists( const MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists_ ) VULKAN_HPP_NOEXCEPT
    {
      pMutableDescriptorTypeLists = pMutableDescriptorTypeLists_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets count and pointer together from the proxy (pointer is non-owning).
    MutableDescriptorTypeCreateInfoEXT &
      setMutableDescriptorTypeLists( ArrayProxyNoTemporaries<const MutableDescriptorTypeListEXT> const & mutableDescriptorTypeLists_ ) VULKAN_HPP_NOEXCEPT
    {
      mutableDescriptorTypeListCount = static_cast<uint32_t>( mutableDescriptorTypeLists_.size() );
      pMutableDescriptorTypeLists    = mutableDescriptorTypeLists_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type: the wrapper is reinterpreted in place
    // (reference and pointer variants, const and non-const).
    operator VkMutableDescriptorTypeCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMutableDescriptorTypeCreateInfoEXT *>( this );
    }

    operator VkMutableDescriptorTypeCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMutableDescriptorTypeCreateInfoEXT *>( this );
    }

    operator VkMutableDescriptorTypeCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkMutableDescriptorTypeCreateInfoEXT *>( this );
    }

    operator VkMutableDescriptorTypeCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkMutableDescriptorTypeCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; used by operator== below.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const MutableDescriptorTypeListEXT * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, mutableDescriptorTypeListCount, pMutableDescriptorTypeLists );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MutableDescriptorTypeCreateInfoEXT const & ) const = default;
#else
    // Member-wise equality; pNext and pMutableDescriptorTypeLists are compared by pointer
    // value, not by pointee contents.
    bool operator==( MutableDescriptorTypeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mutableDescriptorTypeListCount == rhs.mutableDescriptorTypeListCount ) &&
             ( pMutableDescriptorTypeLists == rhs.pMutableDescriptorTypeLists );
#  endif
    }

    bool operator!=( MutableDescriptorTypeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Mirrors VkMutableDescriptorTypeCreateInfoEXT member for member.
    StructureType                        sType                          = StructureType::eMutableDescriptorTypeCreateInfoEXT;
    const void *                         pNext                          = {};
    uint32_t                             mutableDescriptorTypeListCount = {};
    const MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan type to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkMutableDescriptorTypeCreateInfoEXT>
  {
    using Type = MutableDescriptorTypeCreateInfoEXT;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eMutableDescriptorTypeCreateInfoEXT>
  {
    using Type = MutableDescriptorTypeCreateInfoEXT;
  };

  // Alias preserving the original VALVE-suffixed name of this struct.
  using MutableDescriptorTypeCreateInfoVALVE = MutableDescriptorTypeCreateInfoEXT;

#if defined( VK_USE_PLATFORM_OHOS )
  // wrapper struct for struct VkNativeBufferFormatPropertiesOHOS, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkNativeBufferFormatPropertiesOHOS.html
  struct NativeBufferFormatPropertiesOHOS
  {
    using NativeType = VkNativeBufferFormatPropertiesOHOS;

    // May appear at most once in a pNext chain; carries the sType value below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eNativeBufferFormatPropertiesOHOS;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by the member initializer of the struct.
    // Note: no setters are generated for this struct (non-const pNext — it is a returned
    // properties structure).
    VULKAN_HPP_CONSTEXPR NativeBufferFormatPropertiesOHOS( Format                      format_                           = Format::eUndefined,
                                                           uint64_t                    externalFormat_                   = {},
                                                           FormatFeatureFlags          formatFeatures_                   = {},
                                                           ComponentMapping            samplerYcbcrConversionComponents_ = {},
                                                           SamplerYcbcrModelConversion suggestedYcbcrModel_    = SamplerYcbcrModelConversion::eRgbIdentity,
                                                           SamplerYcbcrRange           suggestedYcbcrRange_    = SamplerYcbcrRange::eItuFull,
                                                           ChromaLocation              suggestedXChromaOffset_ = ChromaLocation::eCositedEven,
                                                           ChromaLocation              suggestedYChromaOffset_ = ChromaLocation::eCositedEven,
                                                           void *                      pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , format{ format_ }
      , externalFormat{ externalFormat_ }
      , formatFeatures{ formatFeatures_ }
      , samplerYcbcrConversionComponents{ samplerYcbcrConversionComponents_ }
      , suggestedYcbcrModel{ suggestedYcbcrModel_ }
      , suggestedYcbcrRange{ suggestedYcbcrRange_ }
      , suggestedXChromaOffset{ suggestedXChromaOffset_ }
      , suggestedYChromaOffset{ suggestedYChromaOffset_ }
    {
    }

    VULKAN_HPP_CONSTEXPR NativeBufferFormatPropertiesOHOS( NativeBufferFormatPropertiesOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on the wrapper mirroring the C layout.
    NativeBufferFormatPropertiesOHOS( VkNativeBufferFormatPropertiesOHOS const & rhs ) VULKAN_HPP_NOEXCEPT
      : NativeBufferFormatPropertiesOHOS( *reinterpret_cast<NativeBufferFormatPropertiesOHOS const *>( &rhs ) )
    {
    }

    NativeBufferFormatPropertiesOHOS & operator=( NativeBufferFormatPropertiesOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (reinterpreted in place).
    NativeBufferFormatPropertiesOHOS & operator=( VkNativeBufferFormatPropertiesOHOS const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<NativeBufferFormatPropertiesOHOS const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native Vulkan type: the wrapper is reinterpreted in place
    // (reference and pointer variants, const and non-const).
    operator VkNativeBufferFormatPropertiesOHOS const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkNativeBufferFormatPropertiesOHOS *>( this );
    }

    operator VkNativeBufferFormatPropertiesOHOS &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkNativeBufferFormatPropertiesOHOS *>( this );
    }

    operator VkNativeBufferFormatPropertiesOHOS const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkNativeBufferFormatPropertiesOHOS *>( this );
    }

    operator VkNativeBufferFormatPropertiesOHOS *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkNativeBufferFormatPropertiesOHOS *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; used by operator== below.
    std::tuple<StructureType const &,
               void * const &,
               Format const &,
               uint64_t const &,
               FormatFeatureFlags const &,
               ComponentMapping const &,
               SamplerYcbcrModelConversion const &,
               SamplerYcbcrRange const &,
               ChromaLocation const &,
               ChromaLocation const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       format,
                       externalFormat,
                       formatFeatures,
                       samplerYcbcrConversionComponents,
                       suggestedYcbcrModel,
                       suggestedYcbcrRange,
                       suggestedXChromaOffset,
                       suggestedYChromaOffset );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( NativeBufferFormatPropertiesOHOS const & ) const = default;
#  else
    // Member-wise equality; pNext is compared by pointer value only.
    bool operator==( NativeBufferFormatPropertiesOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( externalFormat == rhs.externalFormat ) &&
             ( formatFeatures == rhs.formatFeatures ) && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) &&
             ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) &&
             ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
#    endif
    }

    bool operator!=( NativeBufferFormatPropertiesOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Mirrors VkNativeBufferFormatPropertiesOHOS member for member.
    StructureType               sType                            = StructureType::eNativeBufferFormatPropertiesOHOS;
    void *                      pNext                            = {};
    Format                      format                           = Format::eUndefined;
    uint64_t                    externalFormat                   = {};
    FormatFeatureFlags          formatFeatures                   = {};
    ComponentMapping            samplerYcbcrConversionComponents = {};
    SamplerYcbcrModelConversion suggestedYcbcrModel              = SamplerYcbcrModelConversion::eRgbIdentity;
    SamplerYcbcrRange           suggestedYcbcrRange              = SamplerYcbcrRange::eItuFull;
    ChromaLocation              suggestedXChromaOffset           = ChromaLocation::eCositedEven;
    ChromaLocation              suggestedYChromaOffset           = ChromaLocation::eCositedEven;
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan type to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkNativeBufferFormatPropertiesOHOS>
  {
    using Type = NativeBufferFormatPropertiesOHOS;
  };
#  endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eNativeBufferFormatPropertiesOHOS>
  {
    using Type = NativeBufferFormatPropertiesOHOS;
  };
#endif /*VK_USE_PLATFORM_OHOS*/

#if defined( VK_USE_PLATFORM_OHOS )
  // wrapper struct for struct VkNativeBufferOHOS, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkNativeBufferOHOS.html
  struct NativeBufferOHOS
  {
    using NativeType = VkNativeBufferOHOS;

    // May appear at most once in a pNext chain; carries the sType value below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eNativeBufferOHOS;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; handle is an opaque OHOS buffer handle pointer (non-owning).
    VULKAN_HPP_CONSTEXPR NativeBufferOHOS( struct OHBufferHandle * handle_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , handle{ handle_ }
    {
    }

    VULKAN_HPP_CONSTEXPR NativeBufferOHOS( NativeBufferOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on the wrapper mirroring the C layout.
    NativeBufferOHOS( VkNativeBufferOHOS const & rhs ) VULKAN_HPP_NOEXCEPT : NativeBufferOHOS( *reinterpret_cast<NativeBufferOHOS const *>( &rhs ) ) {}

    NativeBufferOHOS & operator=( NativeBufferOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (reinterpreted in place).
    NativeBufferOHOS & operator=( VkNativeBufferOHOS const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<NativeBufferOHOS const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member (sType is not settable).
    VULKAN_HPP_CONSTEXPR_14 NativeBufferOHOS & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 NativeBufferOHOS & setHandle( struct OHBufferHandle * handle_ ) VULKAN_HPP_NOEXCEPT
    {
      handle = handle_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type: the wrapper is reinterpreted in place
    // (reference and pointer variants, const and non-const).
    operator VkNativeBufferOHOS const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkNativeBufferOHOS *>( this );
    }

    operator VkNativeBufferOHOS &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkNativeBufferOHOS *>( this );
    }

    operator VkNativeBufferOHOS const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkNativeBufferOHOS *>( this );
    }

    operator VkNativeBufferOHOS *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkNativeBufferOHOS *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; used by operator== below.
    std::tuple<StructureType const &, const void * const &, struct OHBufferHandle * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, handle );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( NativeBufferOHOS const & ) const = default;
#  else
    // Member-wise equality; pNext and handle are compared by pointer value only.
    bool operator==( NativeBufferOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handle == rhs.handle );
#    endif
    }

    bool operator!=( NativeBufferOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Mirrors VkNativeBufferOHOS member for member.
    StructureType           sType  = StructureType::eNativeBufferOHOS;
    const void *            pNext  = {};
    struct OHBufferHandle * handle = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan type to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkNativeBufferOHOS>
  {
    using Type = NativeBufferOHOS;
  };
#  endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eNativeBufferOHOS>
  {
    using Type = NativeBufferOHOS;
  };
#endif /*VK_USE_PLATFORM_OHOS*/

#if defined( VK_USE_PLATFORM_OHOS )
  // wrapper struct for struct VkNativeBufferPropertiesOHOS, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkNativeBufferPropertiesOHOS.html
  struct NativeBufferPropertiesOHOS
  {
    using NativeType = VkNativeBufferPropertiesOHOS;

    // May appear at most once in a pNext chain; carries the sType value below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eNativeBufferPropertiesOHOS;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by the member initializer of the struct.
    // Note: no setters are generated for this struct (non-const pNext — it is a returned
    // properties structure).
    VULKAN_HPP_CONSTEXPR
      NativeBufferPropertiesOHOS( DeviceSize allocationSize_ = {}, uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , allocationSize{ allocationSize_ }
      , memoryTypeBits{ memoryTypeBits_ }
    {
    }

    VULKAN_HPP_CONSTEXPR NativeBufferPropertiesOHOS( NativeBufferPropertiesOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on the wrapper mirroring the C layout.
    NativeBufferPropertiesOHOS( VkNativeBufferPropertiesOHOS const & rhs ) VULKAN_HPP_NOEXCEPT
      : NativeBufferPropertiesOHOS( *reinterpret_cast<NativeBufferPropertiesOHOS const *>( &rhs ) )
    {
    }

    NativeBufferPropertiesOHOS & operator=( NativeBufferPropertiesOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (reinterpreted in place).
    NativeBufferPropertiesOHOS & operator=( VkNativeBufferPropertiesOHOS const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<NativeBufferPropertiesOHOS const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native Vulkan type: the wrapper is reinterpreted in place
    // (reference and pointer variants, const and non-const).
    operator VkNativeBufferPropertiesOHOS const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkNativeBufferPropertiesOHOS *>( this );
    }

    operator VkNativeBufferPropertiesOHOS &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkNativeBufferPropertiesOHOS *>( this );
    }

    operator VkNativeBufferPropertiesOHOS const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkNativeBufferPropertiesOHOS *>( this );
    }

    operator VkNativeBufferPropertiesOHOS *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkNativeBufferPropertiesOHOS *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; used by operator== below.
    std::tuple<StructureType const &, void * const &, DeviceSize const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, allocationSize, memoryTypeBits );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( NativeBufferPropertiesOHOS const & ) const = default;
#  else
    // Member-wise equality; pNext is compared by pointer value only.
    bool operator==( NativeBufferPropertiesOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && ( memoryTypeBits == rhs.memoryTypeBits );
#    endif
    }

    bool operator!=( NativeBufferPropertiesOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Mirrors VkNativeBufferPropertiesOHOS member for member.
    StructureType sType          = StructureType::eNativeBufferPropertiesOHOS;
    void *        pNext          = {};
    DeviceSize    allocationSize = {};
    uint32_t      memoryTypeBits = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan type to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkNativeBufferPropertiesOHOS>
  {
    using Type = NativeBufferPropertiesOHOS;
  };
#  endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eNativeBufferPropertiesOHOS>
  {
    using Type = NativeBufferPropertiesOHOS;
  };
#endif /*VK_USE_PLATFORM_OHOS*/

#if defined( VK_USE_PLATFORM_OHOS )
  // wrapper struct for struct VkNativeBufferUsageOHOS, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkNativeBufferUsageOHOS.html
  struct NativeBufferUsageOHOS
  {
    using NativeType = VkNativeBufferUsageOHOS;

    // May appear at most once in a pNext chain; carries the sType value below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eNativeBufferUsageOHOS;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by the member initializer of the struct.
    // Note: no setters are generated for this struct (non-const pNext — it is a returned
    // properties structure).
    VULKAN_HPP_CONSTEXPR NativeBufferUsageOHOS( uint64_t OHOSNativeBufferUsage_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , OHOSNativeBufferUsage{ OHOSNativeBufferUsage_ }
    {
    }

    VULKAN_HPP_CONSTEXPR NativeBufferUsageOHOS( NativeBufferUsageOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on the wrapper mirroring the C layout.
    NativeBufferUsageOHOS( VkNativeBufferUsageOHOS const & rhs ) VULKAN_HPP_NOEXCEPT
      : NativeBufferUsageOHOS( *reinterpret_cast<NativeBufferUsageOHOS const *>( &rhs ) )
    {
    }

    NativeBufferUsageOHOS & operator=( NativeBufferUsageOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (reinterpreted in place).
    NativeBufferUsageOHOS & operator=( VkNativeBufferUsageOHOS const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<NativeBufferUsageOHOS const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native Vulkan type: the wrapper is reinterpreted in place
    // (reference and pointer variants, const and non-const).
    operator VkNativeBufferUsageOHOS const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkNativeBufferUsageOHOS *>( this );
    }

    operator VkNativeBufferUsageOHOS &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkNativeBufferUsageOHOS *>( this );
    }

    operator VkNativeBufferUsageOHOS const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkNativeBufferUsageOHOS *>( this );
    }

    operator VkNativeBufferUsageOHOS *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkNativeBufferUsageOHOS *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; used by operator== below.
    std::tuple<StructureType const &, void * const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, OHOSNativeBufferUsage );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( NativeBufferUsageOHOS const & ) const = default;
#  else
    // Member-wise equality; pNext is compared by pointer value only.
    bool operator==( NativeBufferUsageOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( OHOSNativeBufferUsage == rhs.OHOSNativeBufferUsage );
#    endif
    }

    bool operator!=( NativeBufferUsageOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Mirrors VkNativeBufferUsageOHOS member for member.
    StructureType sType                 = StructureType::eNativeBufferUsageOHOS;
    void *        pNext                 = {};
    uint64_t      OHOSNativeBufferUsage = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan type to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkNativeBufferUsageOHOS>
  {
    using Type = NativeBufferUsageOHOS;
  };
#  endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eNativeBufferUsageOHOS>
  {
    using Type = NativeBufferUsageOHOS;
  };
#endif /*VK_USE_PLATFORM_OHOS*/

  // wrapper struct for struct VkOpaqueCaptureDescriptorDataCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpaqueCaptureDescriptorDataCreateInfoEXT.html
  struct OpaqueCaptureDescriptorDataCreateInfoEXT
  {
    using NativeType = VkOpaqueCaptureDescriptorDataCreateInfoEXT;

    // May appear at most once in a pNext chain; carries the sType value below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eOpaqueCaptureDescriptorDataCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; opaqueCaptureDescriptorData is an opaque, non-owning pointer.
    VULKAN_HPP_CONSTEXPR OpaqueCaptureDescriptorDataCreateInfoEXT( const void * opaqueCaptureDescriptorData_ = {},
                                                                   const void * pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , opaqueCaptureDescriptorData{ opaqueCaptureDescriptorData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR OpaqueCaptureDescriptorDataCreateInfoEXT( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on the wrapper mirroring the C layout.
    OpaqueCaptureDescriptorDataCreateInfoEXT( VkOpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : OpaqueCaptureDescriptorDataCreateInfoEXT( *reinterpret_cast<OpaqueCaptureDescriptorDataCreateInfoEXT const *>( &rhs ) )
    {
    }

    OpaqueCaptureDescriptorDataCreateInfoEXT & operator=( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (reinterpreted in place).
    OpaqueCaptureDescriptorDataCreateInfoEXT & operator=( VkOpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<OpaqueCaptureDescriptorDataCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member (sType is not settable).
    VULKAN_HPP_CONSTEXPR_14 OpaqueCaptureDescriptorDataCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 OpaqueCaptureDescriptorDataCreateInfoEXT &
      setOpaqueCaptureDescriptorData( const void * opaqueCaptureDescriptorData_ ) VULKAN_HPP_NOEXCEPT
    {
      opaqueCaptureDescriptorData = opaqueCaptureDescriptorData_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type: the wrapper is reinterpreted in place
    // (reference and pointer variants, const and non-const).
    operator VkOpaqueCaptureDescriptorDataCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkOpaqueCaptureDescriptorDataCreateInfoEXT *>( this );
    }

    operator VkOpaqueCaptureDescriptorDataCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkOpaqueCaptureDescriptorDataCreateInfoEXT *>( this );
    }

    operator VkOpaqueCaptureDescriptorDataCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkOpaqueCaptureDescriptorDataCreateInfoEXT *>( this );
    }

    operator VkOpaqueCaptureDescriptorDataCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkOpaqueCaptureDescriptorDataCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; used by operator== below.
    std::tuple<StructureType const &, const void * const &, const void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, opaqueCaptureDescriptorData );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( OpaqueCaptureDescriptorDataCreateInfoEXT const & ) const = default;
#else
    // Member-wise equality; both pointer members are compared by pointer value only.
    bool operator==( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureDescriptorData == rhs.opaqueCaptureDescriptorData );
#  endif
    }

    bool operator!=( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Mirrors VkOpaqueCaptureDescriptorDataCreateInfoEXT member for member.
    StructureType sType                       = StructureType::eOpaqueCaptureDescriptorDataCreateInfoEXT;
    const void *  pNext                       = {};
    const void *  opaqueCaptureDescriptorData = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan type to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkOpaqueCaptureDescriptorDataCreateInfoEXT>
  {
    using Type = OpaqueCaptureDescriptorDataCreateInfoEXT;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eOpaqueCaptureDescriptorDataCreateInfoEXT>
  {
    using Type = OpaqueCaptureDescriptorDataCreateInfoEXT;
  };

  // wrapper struct for struct VkOpticalFlowExecuteInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowExecuteInfoNV.html
  struct OpticalFlowExecuteInfoNV
  {
    using NativeType = VkOpticalFlowExecuteInfoNV;

    // Structure-chain metadata: the sType enumerant for this struct, and whether it may
    // appear more than once in a pNext chain (presumably consumed by StructureChain — confirm).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eOpticalFlowExecuteInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer and is not settable.
    VULKAN_HPP_CONSTEXPR OpticalFlowExecuteInfoNV( OpticalFlowExecuteFlagsNV flags_       = {},
                                                   uint32_t                  regionCount_ = {},
                                                   const Rect2D *            pRegions_    = {},
                                                   void *                    pNext_       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , regionCount{ regionCount_ }
      , pRegions{ pRegions_ }
    {
    }

    VULKAN_HPP_CONSTEXPR OpticalFlowExecuteInfoNV( OpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-compatible
    // with VkOpticalFlowExecuteInfoNV (see the conversion operators below).
    OpticalFlowExecuteInfoNV( VkOpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : OpticalFlowExecuteInfoNV( *reinterpret_cast<OpticalFlowExecuteInfoNV const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode convenience: derives regionCount/pRegions from the array proxy.
    // The caller must keep the referenced Rect2D array alive while this struct is in use.
    OpticalFlowExecuteInfoNV( OpticalFlowExecuteFlagsNV flags_, ArrayProxyNoTemporaries<const Rect2D> const & regions_, void * pNext_ = nullptr )
      : pNext( pNext_ ), flags( flags_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    OpticalFlowExecuteInfoNV & operator=( OpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the same reinterpret_cast aliasing.
    OpticalFlowExecuteInfoNV & operator=( VkOpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<OpticalFlowExecuteInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chained builder-style initialization.
    VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setFlags( OpticalFlowExecuteFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = regionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setPRegions( const Rect2D * pRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      pRegions = pRegions_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both regionCount and pRegions from the proxy in one call; same lifetime
    // caveat as the enhanced-mode constructor above.
    OpticalFlowExecuteInfoNV & setRegions( ArrayProxyNoTemporaries<const Rect2D> const & regions_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = static_cast<uint32_t>( regions_.size() );
      pRegions    = regions_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, so this wrapper can be passed
    // directly to the C API; all four rely on layout compatibility.
    operator VkOpticalFlowExecuteInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( this );
    }

    operator VkOpticalFlowExecuteInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkOpticalFlowExecuteInfoNV *>( this );
    }

    operator VkOpticalFlowExecuteInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( this );
    }

    operator VkOpticalFlowExecuteInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkOpticalFlowExecuteInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, enabling generic tuple-based
    // comparison (see operator== below) and similar reflection-style code.
    std::tuple<StructureType const &, void * const &, OpticalFlowExecuteFlagsNV const &, uint32_t const &, const Rect2D * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, regionCount, pRegions );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( OpticalFlowExecuteInfoNV const & ) const = default;
#else
    // Member-wise equality; pointer members (pNext, pRegions) are compared by
    // address, not by pointee contents.
    bool operator==( OpticalFlowExecuteInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
#  endif
    }

    bool operator!=( OpticalFlowExecuteInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkOpticalFlowExecuteInfoNV; do not reorder,
    // or the reinterpret_cast conversions above become invalid.
    StructureType             sType       = StructureType::eOpticalFlowExecuteInfoNV;
    void *                    pNext       = {};
    OpticalFlowExecuteFlagsNV flags       = {};
    uint32_t                  regionCount = {};
    const Rect2D *            pRegions    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to this C++ wrapper (used by generic code; C++20 only).
  template <>
  struct CppType<VkOpticalFlowExecuteInfoNV>
  {
    using Type = OpticalFlowExecuteInfoNV;
  };
#endif

  // Maps the StructureType enumerant to this wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eOpticalFlowExecuteInfoNV>
  {
    using Type = OpticalFlowExecuteInfoNV;
  };

  // wrapper struct for struct VkOpticalFlowImageFormatInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowImageFormatInfoNV.html
  struct OpticalFlowImageFormatInfoNV
  {
    using NativeType = VkOpticalFlowImageFormatInfoNV;

    // Structure-chain metadata: the sType enumerant for this struct, and whether it may
    // appear more than once in a pNext chain (presumably consumed by StructureChain — confirm).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eOpticalFlowImageFormatInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer and is not settable.
    VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatInfoNV( OpticalFlowUsageFlagsNV usage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , usage{ usage_ }
    {
    }

    VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatInfoNV( OpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on layout compatibility with
    // VkOpticalFlowImageFormatInfoNV (see the conversion operators below).
    OpticalFlowImageFormatInfoNV( VkOpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : OpticalFlowImageFormatInfoNV( *reinterpret_cast<OpticalFlowImageFormatInfoNV const *>( &rhs ) )
    {
    }

    OpticalFlowImageFormatInfoNV & operator=( OpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the same reinterpret_cast aliasing.
    OpticalFlowImageFormatInfoNV & operator=( VkOpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<OpticalFlowImageFormatInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chained builder-style initialization.
    VULKAN_HPP_CONSTEXPR_14 OpticalFlowImageFormatInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 OpticalFlowImageFormatInfoNV & setUsage( OpticalFlowUsageFlagsNV usage_ ) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, so this wrapper can be passed
    // directly to the C API; all four rely on layout compatibility.
    operator VkOpticalFlowImageFormatInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( this );
    }

    operator VkOpticalFlowImageFormatInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkOpticalFlowImageFormatInfoNV *>( this );
    }

    operator VkOpticalFlowImageFormatInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( this );
    }

    operator VkOpticalFlowImageFormatInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkOpticalFlowImageFormatInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members for generic tuple-based comparison.
    std::tuple<StructureType const &, const void * const &, OpticalFlowUsageFlagsNV const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, usage );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( OpticalFlowImageFormatInfoNV const & ) const = default;
#else
    // Member-wise equality; pNext is compared by address, not by pointee contents.
    bool operator==( OpticalFlowImageFormatInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage );
#  endif
    }

    bool operator!=( OpticalFlowImageFormatInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkOpticalFlowImageFormatInfoNV; do not reorder,
    // or the reinterpret_cast conversions above become invalid.
    StructureType           sType = StructureType::eOpticalFlowImageFormatInfoNV;
    const void *            pNext = {};
    OpticalFlowUsageFlagsNV usage = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to this C++ wrapper (used by generic code; C++20 only).
  template <>
  struct CppType<VkOpticalFlowImageFormatInfoNV>
  {
    using Type = OpticalFlowImageFormatInfoNV;
  };
#endif

  // Maps the StructureType enumerant to this wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eOpticalFlowImageFormatInfoNV>
  {
    using Type = OpticalFlowImageFormatInfoNV;
  };

  // wrapper struct for struct VkOpticalFlowImageFormatPropertiesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowImageFormatPropertiesNV.html
  struct OpticalFlowImageFormatPropertiesNV
  {
    using NativeType = VkOpticalFlowImageFormatPropertiesNV;

    // Structure-chain metadata: the sType enumerant for this struct, and whether it may
    // appear more than once in a pNext chain (presumably consumed by StructureChain — confirm).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eOpticalFlowImageFormatPropertiesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer and is not settable.
    VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatPropertiesNV( Format format_ = Format::eUndefined, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , format{ format_ }
    {
    }

    VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatPropertiesNV( OpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on layout compatibility with
    // VkOpticalFlowImageFormatPropertiesNV (see the conversion operators below).
    OpticalFlowImageFormatPropertiesNV( VkOpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : OpticalFlowImageFormatPropertiesNV( *reinterpret_cast<OpticalFlowImageFormatPropertiesNV const *>( &rhs ) )
    {
    }

    OpticalFlowImageFormatPropertiesNV & operator=( OpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the same reinterpret_cast aliasing.
    // Note: no setters are generated for this struct — it is filled in by the
    // implementation rather than the application.
    OpticalFlowImageFormatPropertiesNV & operator=( VkOpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<OpticalFlowImageFormatPropertiesNV const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type, so this wrapper can be passed
    // directly to the C API; all four rely on layout compatibility.
    operator VkOpticalFlowImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkOpticalFlowImageFormatPropertiesNV *>( this );
    }

    operator VkOpticalFlowImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( this );
    }

    operator VkOpticalFlowImageFormatPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkOpticalFlowImageFormatPropertiesNV *>( this );
    }

    operator VkOpticalFlowImageFormatPropertiesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members for generic tuple-based comparison.
    std::tuple<StructureType const &, void * const &, Format const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, format );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( OpticalFlowImageFormatPropertiesNV const & ) const = default;
#else
    // Member-wise equality; pNext is compared by address, not by pointee contents.
    bool operator==( OpticalFlowImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format );
#  endif
    }

    bool operator!=( OpticalFlowImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkOpticalFlowImageFormatPropertiesNV; do not
    // reorder, or the reinterpret_cast conversions above become invalid.
    StructureType sType  = StructureType::eOpticalFlowImageFormatPropertiesNV;
    void *        pNext  = {};
    Format        format = Format::eUndefined;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to this C++ wrapper (used by generic code; C++20 only).
  template <>
  struct CppType<VkOpticalFlowImageFormatPropertiesNV>
  {
    using Type = OpticalFlowImageFormatPropertiesNV;
  };
#endif

  // Maps the StructureType enumerant to this wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eOpticalFlowImageFormatPropertiesNV>
  {
    using Type = OpticalFlowImageFormatPropertiesNV;
  };

  // wrapper struct for struct VkOpticalFlowSessionCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowSessionCreateInfoNV.html
  struct OpticalFlowSessionCreateInfoNV
  {
    using NativeType = VkOpticalFlowSessionCreateInfoNV;

    // Structure-chain metadata: the sType enumerant for this struct, and whether it may
    // appear more than once in a pNext chain (presumably consumed by StructureChain — confirm).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eOpticalFlowSessionCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer and is not settable.
    VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreateInfoNV( uint32_t                        width_            = {},
                                                         uint32_t                        height_           = {},
                                                         Format                          imageFormat_      = Format::eUndefined,
                                                         Format                          flowVectorFormat_ = Format::eUndefined,
                                                         Format                          costFormat_       = Format::eUndefined,
                                                         OpticalFlowGridSizeFlagsNV      outputGridSize_   = {},
                                                         OpticalFlowGridSizeFlagsNV      hintGridSize_     = {},
                                                         OpticalFlowPerformanceLevelNV   performanceLevel_ = OpticalFlowPerformanceLevelNV::eUnknown,
                                                         OpticalFlowSessionCreateFlagsNV flags_            = {},
                                                         void *                          pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , width{ width_ }
      , height{ height_ }
      , imageFormat{ imageFormat_ }
      , flowVectorFormat{ flowVectorFormat_ }
      , costFormat{ costFormat_ }
      , outputGridSize{ outputGridSize_ }
      , hintGridSize{ hintGridSize_ }
      , performanceLevel{ performanceLevel_ }
      , flags{ flags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreateInfoNV( OpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on layout compatibility with
    // VkOpticalFlowSessionCreateInfoNV (see the conversion operators below).
    OpticalFlowSessionCreateInfoNV( VkOpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : OpticalFlowSessionCreateInfoNV( *reinterpret_cast<OpticalFlowSessionCreateInfoNV const *>( &rhs ) )
    {
    }

    OpticalFlowSessionCreateInfoNV & operator=( OpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the same reinterpret_cast aliasing.
    OpticalFlowSessionCreateInfoNV & operator=( VkOpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<OpticalFlowSessionCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chained builder-style initialization.
    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
    {
      width = width_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
    {
      height = height_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setImageFormat( Format imageFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      imageFormat = imageFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setFlowVectorFormat( Format flowVectorFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      flowVectorFormat = flowVectorFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setCostFormat( Format costFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      costFormat = costFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setOutputGridSize( OpticalFlowGridSizeFlagsNV outputGridSize_ ) VULKAN_HPP_NOEXCEPT
    {
      outputGridSize = outputGridSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setHintGridSize( OpticalFlowGridSizeFlagsNV hintGridSize_ ) VULKAN_HPP_NOEXCEPT
    {
      hintGridSize = hintGridSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setPerformanceLevel( OpticalFlowPerformanceLevelNV performanceLevel_ ) VULKAN_HPP_NOEXCEPT
    {
      performanceLevel = performanceLevel_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setFlags( OpticalFlowSessionCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, so this wrapper can be passed
    // directly to the C API; all four rely on layout compatibility.
    operator VkOpticalFlowSessionCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( this );
    }

    operator VkOpticalFlowSessionCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkOpticalFlowSessionCreateInfoNV *>( this );
    }

    operator VkOpticalFlowSessionCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( this );
    }

    operator VkOpticalFlowSessionCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkOpticalFlowSessionCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members for generic tuple-based comparison.
    std::tuple<StructureType const &,
               void * const &,
               uint32_t const &,
               uint32_t const &,
               Format const &,
               Format const &,
               Format const &,
               OpticalFlowGridSizeFlagsNV const &,
               OpticalFlowGridSizeFlagsNV const &,
               OpticalFlowPerformanceLevelNV const &,
               OpticalFlowSessionCreateFlagsNV const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, width, height, imageFormat, flowVectorFormat, costFormat, outputGridSize, hintGridSize, performanceLevel, flags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( OpticalFlowSessionCreateInfoNV const & ) const = default;
#else
    // Member-wise equality; pNext is compared by address, not by pointee contents.
    bool operator==( OpticalFlowSessionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( width == rhs.width ) && ( height == rhs.height ) && ( imageFormat == rhs.imageFormat ) &&
             ( flowVectorFormat == rhs.flowVectorFormat ) && ( costFormat == rhs.costFormat ) && ( outputGridSize == rhs.outputGridSize ) &&
             ( hintGridSize == rhs.hintGridSize ) && ( performanceLevel == rhs.performanceLevel ) && ( flags == rhs.flags );
#  endif
    }

    bool operator!=( OpticalFlowSessionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkOpticalFlowSessionCreateInfoNV; do not reorder,
    // or the reinterpret_cast conversions above become invalid.
    StructureType                   sType            = StructureType::eOpticalFlowSessionCreateInfoNV;
    void *                          pNext            = {};
    uint32_t                        width            = {};
    uint32_t                        height           = {};
    Format                          imageFormat      = Format::eUndefined;
    Format                          flowVectorFormat = Format::eUndefined;
    Format                          costFormat       = Format::eUndefined;
    OpticalFlowGridSizeFlagsNV      outputGridSize   = {};
    OpticalFlowGridSizeFlagsNV      hintGridSize     = {};
    OpticalFlowPerformanceLevelNV   performanceLevel = OpticalFlowPerformanceLevelNV::eUnknown;
    OpticalFlowSessionCreateFlagsNV flags            = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to this C++ wrapper (used by generic code; C++20 only).
  template <>
  struct CppType<VkOpticalFlowSessionCreateInfoNV>
  {
    using Type = OpticalFlowSessionCreateInfoNV;
  };
#endif

  // Maps the StructureType enumerant to this wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eOpticalFlowSessionCreateInfoNV>
  {
    using Type = OpticalFlowSessionCreateInfoNV;
  };

  // wrapper struct for struct VkOpticalFlowSessionCreatePrivateDataInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowSessionCreatePrivateDataInfoNV.html
  struct OpticalFlowSessionCreatePrivateDataInfoNV
  {
    using NativeType = VkOpticalFlowSessionCreatePrivateDataInfoNV;

    // Structure-chain metadata: the sType enumerant for this struct, and whether it may
    // appear more than once in a pNext chain (presumably consumed by StructureChain — confirm).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer and is not settable.
    VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreatePrivateDataInfoNV( uint32_t     id_           = {},
                                                                    uint32_t     size_         = {},
                                                                    const void * pPrivateData_ = {},
                                                                    void *       pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , id{ id_ }
      , size{ size_ }
      , pPrivateData{ pPrivateData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreatePrivateDataInfoNV( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on layout compatibility with
    // VkOpticalFlowSessionCreatePrivateDataInfoNV (see the conversion operators below).
    OpticalFlowSessionCreatePrivateDataInfoNV( VkOpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : OpticalFlowSessionCreatePrivateDataInfoNV( *reinterpret_cast<OpticalFlowSessionCreatePrivateDataInfoNV const *>( &rhs ) )
    {
    }

    OpticalFlowSessionCreatePrivateDataInfoNV & operator=( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the same reinterpret_cast aliasing.
    OpticalFlowSessionCreatePrivateDataInfoNV & operator=( VkOpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<OpticalFlowSessionCreatePrivateDataInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chained builder-style initialization.
    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setId( uint32_t id_ ) VULKAN_HPP_NOEXCEPT
    {
      id = id_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setPPrivateData( const void * pPrivateData_ ) VULKAN_HPP_NOEXCEPT
    {
      pPrivateData = pPrivateData_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, so this wrapper can be passed
    // directly to the C API; all four rely on layout compatibility.
    operator VkOpticalFlowSessionCreatePrivateDataInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkOpticalFlowSessionCreatePrivateDataInfoNV *>( this );
    }

    operator VkOpticalFlowSessionCreatePrivateDataInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkOpticalFlowSessionCreatePrivateDataInfoNV *>( this );
    }

    operator VkOpticalFlowSessionCreatePrivateDataInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkOpticalFlowSessionCreatePrivateDataInfoNV *>( this );
    }

    operator VkOpticalFlowSessionCreatePrivateDataInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkOpticalFlowSessionCreatePrivateDataInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members for generic tuple-based comparison.
    std::tuple<StructureType const &, void * const &, uint32_t const &, uint32_t const &, const void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, id, size, pPrivateData );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( OpticalFlowSessionCreatePrivateDataInfoNV const & ) const = default;
#else
    // Member-wise equality; pointer members (pNext, pPrivateData) are compared by
    // address, not by pointee contents.
    bool operator==( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( id == rhs.id ) && ( size == rhs.size ) && ( pPrivateData == rhs.pPrivateData );
#  endif
    }

    bool operator!=( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkOpticalFlowSessionCreatePrivateDataInfoNV; do not
    // reorder, or the reinterpret_cast conversions above become invalid.
    StructureType sType        = StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV;
    void *        pNext        = {};
    uint32_t      id           = {};
    uint32_t      size         = {};
    const void *  pPrivateData = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to this C++ wrapper (used by generic code; C++20 only).
  template <>
  struct CppType<VkOpticalFlowSessionCreatePrivateDataInfoNV>
  {
    using Type = OpticalFlowSessionCreatePrivateDataInfoNV;
  };
#endif

  // Maps the StructureType enumerant to this wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV>
  {
    using Type = OpticalFlowSessionCreatePrivateDataInfoNV;
  };

  // wrapper struct for struct VkOutOfBandQueueTypeInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOutOfBandQueueTypeInfoNV.html
  struct OutOfBandQueueTypeInfoNV
  {
    using NativeType = VkOutOfBandQueueTypeInfoNV;

    // Structure-chain metadata: the sType enumerant for this struct, and whether it may
    // appear more than once in a pNext chain (presumably consumed by StructureChain — confirm).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eOutOfBandQueueTypeInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer and is not settable.
    VULKAN_HPP_CONSTEXPR OutOfBandQueueTypeInfoNV( OutOfBandQueueTypeNV queueType_ = OutOfBandQueueTypeNV::eRender,
                                                   const void *         pNext_     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , queueType{ queueType_ }
    {
    }

    VULKAN_HPP_CONSTEXPR OutOfBandQueueTypeInfoNV( OutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on layout compatibility with
    // VkOutOfBandQueueTypeInfoNV (see the conversion operators below).
    OutOfBandQueueTypeInfoNV( VkOutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : OutOfBandQueueTypeInfoNV( *reinterpret_cast<OutOfBandQueueTypeInfoNV const *>( &rhs ) )
    {
    }

    OutOfBandQueueTypeInfoNV & operator=( OutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the same reinterpret_cast aliasing.
    OutOfBandQueueTypeInfoNV & operator=( VkOutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<OutOfBandQueueTypeInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chained builder-style initialization.
    VULKAN_HPP_CONSTEXPR_14 OutOfBandQueueTypeInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 OutOfBandQueueTypeInfoNV & setQueueType( OutOfBandQueueTypeNV queueType_ ) VULKAN_HPP_NOEXCEPT
    {
      queueType = queueType_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, so this wrapper can be passed
    // directly to the C API; all four rely on layout compatibility.
    operator VkOutOfBandQueueTypeInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( this );
    }

    operator VkOutOfBandQueueTypeInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkOutOfBandQueueTypeInfoNV *>( this );
    }

    operator VkOutOfBandQueueTypeInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( this );
    }

    operator VkOutOfBandQueueTypeInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkOutOfBandQueueTypeInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members for generic tuple-based comparison.
    std::tuple<StructureType const &, const void * const &, OutOfBandQueueTypeNV const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, queueType );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( OutOfBandQueueTypeInfoNV const & ) const = default;
#else
    // Member-wise equality; pNext is compared by address, not by pointee contents.
    bool operator==( OutOfBandQueueTypeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueType == rhs.queueType );
#  endif
    }

    bool operator!=( OutOfBandQueueTypeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkOutOfBandQueueTypeInfoNV; do not reorder,
    // or the reinterpret_cast conversions above become invalid.
    StructureType        sType     = StructureType::eOutOfBandQueueTypeInfoNV;
    const void *         pNext     = {};
    OutOfBandQueueTypeNV queueType = OutOfBandQueueTypeNV::eRender;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to this C++ wrapper (used by generic code; C++20 only).
  template <>
  struct CppType<VkOutOfBandQueueTypeInfoNV>
  {
    using Type = OutOfBandQueueTypeInfoNV;
  };
#endif

  // Maps the StructureType enumerant to this wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eOutOfBandQueueTypeInfoNV>
  {
    using Type = OutOfBandQueueTypeInfoNV;
  };

  // wrapper struct for struct VkPartitionedAccelerationStructureFlagsNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPartitionedAccelerationStructureFlagsNV.html
  struct PartitionedAccelerationStructureFlagsNV
  {
    using NativeType = VkPartitionedAccelerationStructureFlagsNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePartitionedAccelerationStructureFlagsNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PartitionedAccelerationStructureFlagsNV( Bool32 enablePartitionTranslation_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , enablePartitionTranslation{ enablePartitionTranslation_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PartitionedAccelerationStructureFlagsNV( PartitionedAccelerationStructureFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PartitionedAccelerationStructureFlagsNV( VkPartitionedAccelerationStructureFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PartitionedAccelerationStructureFlagsNV( *reinterpret_cast<PartitionedAccelerationStructureFlagsNV const *>( &rhs ) )
    {
    }

    PartitionedAccelerationStructureFlagsNV & operator=( PartitionedAccelerationStructureFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PartitionedAccelerationStructureFlagsNV & operator=( VkPartitionedAccelerationStructureFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PartitionedAccelerationStructureFlagsNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureFlagsNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureFlagsNV & setEnablePartitionTranslation( Bool32 enablePartitionTranslation_ ) VULKAN_HPP_NOEXCEPT
    {
      enablePartitionTranslation = enablePartitionTranslation_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPartitionedAccelerationStructureFlagsNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPartitionedAccelerationStructureFlagsNV *>( this );
    }

    operator VkPartitionedAccelerationStructureFlagsNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPartitionedAccelerationStructureFlagsNV *>( this );
    }

    operator VkPartitionedAccelerationStructureFlagsNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPartitionedAccelerationStructureFlagsNV *>( this );
    }

    operator VkPartitionedAccelerationStructureFlagsNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPartitionedAccelerationStructureFlagsNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, enablePartitionTranslation );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PartitionedAccelerationStructureFlagsNV const & ) const = default;
#else
    bool operator==( PartitionedAccelerationStructureFlagsNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( enablePartitionTranslation == rhs.enablePartitionTranslation );
#  endif
    }

    bool operator!=( PartitionedAccelerationStructureFlagsNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                      = StructureType::ePartitionedAccelerationStructureFlagsNV;
    void *        pNext                      = {};
    Bool32        enablePartitionTranslation = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper for generic code (available under C++20).
  template <>
  struct CppType<VkPartitionedAccelerationStructureFlagsNV>
  {
    using Type = PartitionedAccelerationStructureFlagsNV;
  };
#endif

  // Maps StructureType::ePartitionedAccelerationStructureFlagsNV to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePartitionedAccelerationStructureFlagsNV>
  {
    using Type = PartitionedAccelerationStructureFlagsNV;
  };

  // wrapper struct for struct VkPartitionedAccelerationStructureUpdateInstanceDataNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPartitionedAccelerationStructureUpdateInstanceDataNV.html
  struct PartitionedAccelerationStructureUpdateInstanceDataNV
  {
    using NativeType = VkPartitionedAccelerationStructureUpdateInstanceDataNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; every field defaults to a zero-initialized value.
    VULKAN_HPP_CONSTEXPR PartitionedAccelerationStructureUpdateInstanceDataNV( uint32_t      instanceIndex_                       = {},
                                                                               uint32_t      instanceContributionToHitGroupIndex_ = {},
                                                                               DeviceAddress accelerationStructure_               = {} ) VULKAN_HPP_NOEXCEPT
      : instanceIndex{ instanceIndex_ }
      , instanceContributionToHitGroupIndex{ instanceContributionToHitGroupIndex_ }
      , accelerationStructure{ accelerationStructure_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PartitionedAccelerationStructureUpdateInstanceDataNV( PartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-compatible with it.
    PartitionedAccelerationStructureUpdateInstanceDataNV( VkPartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PartitionedAccelerationStructureUpdateInstanceDataNV( *reinterpret_cast<PartitionedAccelerationStructureUpdateInstanceDataNV const *>( &rhs ) )
    {
    }

    PartitionedAccelerationStructureUpdateInstanceDataNV &
      operator=( PartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper.
    PartitionedAccelerationStructureUpdateInstanceDataNV & operator=( VkPartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PartitionedAccelerationStructureUpdateInstanceDataNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureUpdateInstanceDataNV & setInstanceIndex( uint32_t instanceIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceIndex = instanceIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureUpdateInstanceDataNV &
      setInstanceContributionToHitGroupIndex( uint32_t instanceContributionToHitGroupIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceContributionToHitGroupIndex = instanceContributionToHitGroupIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureUpdateInstanceDataNV &
      setAccelerationStructure( DeviceAddress accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructure = accelerationStructure_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer (const and non-const).
    operator VkPartitionedAccelerationStructureUpdateInstanceDataNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPartitionedAccelerationStructureUpdateInstanceDataNV *>( this );
    }

    operator VkPartitionedAccelerationStructureUpdateInstanceDataNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPartitionedAccelerationStructureUpdateInstanceDataNV *>( this );
    }

    operator VkPartitionedAccelerationStructureUpdateInstanceDataNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPartitionedAccelerationStructureUpdateInstanceDataNV *>( this );
    }

    operator VkPartitionedAccelerationStructureUpdateInstanceDataNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPartitionedAccelerationStructureUpdateInstanceDataNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, in declaration order; used for reflection-based comparison.
    std::tuple<uint32_t const &, uint32_t const &, DeviceAddress const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( instanceIndex, instanceContributionToHitGroupIndex, accelerationStructure );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PartitionedAccelerationStructureUpdateInstanceDataNV const & ) const = default;
#else
    // Member-wise equality when the defaulted spaceship operator is not available.
    bool operator==( PartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( instanceIndex == rhs.instanceIndex ) && ( instanceContributionToHitGroupIndex == rhs.instanceContributionToHitGroupIndex ) &&
             ( accelerationStructure == rhs.accelerationStructure );
#  endif
    }

    bool operator!=( PartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPartitionedAccelerationStructureUpdateInstanceDataNV, in the same order.
    uint32_t      instanceIndex                       = {};
    uint32_t      instanceContributionToHitGroupIndex = {};
    DeviceAddress accelerationStructure               = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper for generic code (available under C++20).
  template <>
  struct CppType<VkPartitionedAccelerationStructureUpdateInstanceDataNV>
  {
    using Type = PartitionedAccelerationStructureUpdateInstanceDataNV;
  };
#endif

  // wrapper struct for struct VkPartitionedAccelerationStructureWriteInstanceDataNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPartitionedAccelerationStructureWriteInstanceDataNV.html
  struct PartitionedAccelerationStructureWriteInstanceDataNV
  {
    using NativeType = VkPartitionedAccelerationStructureWriteInstanceDataNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; CONSTEXPR_14 because initializing the explicitAABB array wrapper
    // is not a C++11 constant expression. All fields default to zero-initialized values.
    VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV( TransformMatrixKHR           transform_                           = {},
                                                                                 std::array<float, 6> const & explicitAABB_                        = {},
                                                                                 uint32_t                     instanceID_                          = {},
                                                                                 uint32_t                     instanceMask_                        = {},
                                                                                 uint32_t                     instanceContributionToHitGroupIndex_ = {},
                                                                                 PartitionedAccelerationStructureInstanceFlagsNV instanceFlags_    = {},
                                                                                 uint32_t                                        instanceIndex_    = {},
                                                                                 uint32_t                                        partitionIndex_   = {},
                                                                                 DeviceAddress accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT
      : transform{ transform_ }
      , explicitAABB{ explicitAABB_ }
      , instanceID{ instanceID_ }
      , instanceMask{ instanceMask_ }
      , instanceContributionToHitGroupIndex{ instanceContributionToHitGroupIndex_ }
      , instanceFlags{ instanceFlags_ }
      , instanceIndex{ instanceIndex_ }
      , partitionIndex{ partitionIndex_ }
      , accelerationStructure{ accelerationStructure_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14
      PartitionedAccelerationStructureWriteInstanceDataNV( PartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-compatible with it.
    PartitionedAccelerationStructureWriteInstanceDataNV( VkPartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PartitionedAccelerationStructureWriteInstanceDataNV( *reinterpret_cast<PartitionedAccelerationStructureWriteInstanceDataNV const *>( &rhs ) )
    {
    }

    PartitionedAccelerationStructureWriteInstanceDataNV &
      operator=( PartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper.
    PartitionedAccelerationStructureWriteInstanceDataNV & operator=( VkPartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PartitionedAccelerationStructureWriteInstanceDataNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setTransform( TransformMatrixKHR const & transform_ ) VULKAN_HPP_NOEXCEPT
    {
      transform = transform_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setExplicitAABB( std::array<float, 6> explicitAABB_ ) VULKAN_HPP_NOEXCEPT
    {
      explicitAABB = explicitAABB_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setInstanceID( uint32_t instanceID_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceID = instanceID_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setInstanceMask( uint32_t instanceMask_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceMask = instanceMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV &
      setInstanceContributionToHitGroupIndex( uint32_t instanceContributionToHitGroupIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceContributionToHitGroupIndex = instanceContributionToHitGroupIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV &
      setInstanceFlags( PartitionedAccelerationStructureInstanceFlagsNV instanceFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceFlags = instanceFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setInstanceIndex( uint32_t instanceIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceIndex = instanceIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setPartitionIndex( uint32_t partitionIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      partitionIndex = partitionIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV &
      setAccelerationStructure( DeviceAddress accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructure = accelerationStructure_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer (const and non-const).
    operator VkPartitionedAccelerationStructureWriteInstanceDataNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPartitionedAccelerationStructureWriteInstanceDataNV *>( this );
    }

    operator VkPartitionedAccelerationStructureWriteInstanceDataNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPartitionedAccelerationStructureWriteInstanceDataNV *>( this );
    }

    operator VkPartitionedAccelerationStructureWriteInstanceDataNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPartitionedAccelerationStructureWriteInstanceDataNV *>( this );
    }

    operator VkPartitionedAccelerationStructureWriteInstanceDataNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPartitionedAccelerationStructureWriteInstanceDataNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, in declaration order; used for reflection-based comparison.
    std::tuple<TransformMatrixKHR const &,
               ArrayWrapper1D<float, 6> const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               PartitionedAccelerationStructureInstanceFlagsNV const &,
               uint32_t const &,
               uint32_t const &,
               DeviceAddress const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( transform,
                       explicitAABB,
                       instanceID,
                       instanceMask,
                       instanceContributionToHitGroupIndex,
                       instanceFlags,
                       instanceIndex,
                       partitionIndex,
                       accelerationStructure );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PartitionedAccelerationStructureWriteInstanceDataNV const & ) const = default;
#else
    // Member-wise equality when the defaulted spaceship operator is not available.
    bool operator==( PartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( transform == rhs.transform ) && ( explicitAABB == rhs.explicitAABB ) && ( instanceID == rhs.instanceID ) &&
             ( instanceMask == rhs.instanceMask ) && ( instanceContributionToHitGroupIndex == rhs.instanceContributionToHitGroupIndex ) &&
             ( instanceFlags == rhs.instanceFlags ) && ( instanceIndex == rhs.instanceIndex ) && ( partitionIndex == rhs.partitionIndex ) &&
             ( accelerationStructure == rhs.accelerationStructure );
#  endif
    }

    bool operator!=( PartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPartitionedAccelerationStructureWriteInstanceDataNV, in the same order;
    // explicitAABB wraps the C array member float[6].
    TransformMatrixKHR                              transform                           = {};
    ArrayWrapper1D<float, 6>                        explicitAABB                        = {};
    uint32_t                                        instanceID                          = {};
    uint32_t                                        instanceMask                        = {};
    uint32_t                                        instanceContributionToHitGroupIndex = {};
    PartitionedAccelerationStructureInstanceFlagsNV instanceFlags                       = {};
    uint32_t                                        instanceIndex                       = {};
    uint32_t                                        partitionIndex                      = {};
    DeviceAddress                                   accelerationStructure               = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper for generic code (available under C++20).
  template <>
  struct CppType<VkPartitionedAccelerationStructureWriteInstanceDataNV>
  {
    using Type = PartitionedAccelerationStructureWriteInstanceDataNV;
  };
#endif

  // wrapper struct for struct VkPartitionedAccelerationStructureWritePartitionTranslationDataNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPartitionedAccelerationStructureWritePartitionTranslationDataNV.html
  struct PartitionedAccelerationStructureWritePartitionTranslationDataNV
  {
    using NativeType = VkPartitionedAccelerationStructureWritePartitionTranslationDataNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; CONSTEXPR_14 because initializing the partitionTranslation array
    // wrapper is not a C++11 constant expression. All fields default to zero-initialized values.
    VULKAN_HPP_CONSTEXPR_14
      PartitionedAccelerationStructureWritePartitionTranslationDataNV( uint32_t                     partitionIndex_       = {},
                                                                       std::array<float, 3> const & partitionTranslation_ = {} ) VULKAN_HPP_NOEXCEPT
      : partitionIndex{ partitionIndex_ }
      , partitionTranslation{ partitionTranslation_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWritePartitionTranslationDataNV(
      PartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-compatible with it.
    PartitionedAccelerationStructureWritePartitionTranslationDataNV( VkPartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PartitionedAccelerationStructureWritePartitionTranslationDataNV(
          *reinterpret_cast<PartitionedAccelerationStructureWritePartitionTranslationDataNV const *>( &rhs ) )
    {
    }

    PartitionedAccelerationStructureWritePartitionTranslationDataNV &
      operator=( PartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper.
    PartitionedAccelerationStructureWritePartitionTranslationDataNV &
      operator=( VkPartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PartitionedAccelerationStructureWritePartitionTranslationDataNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWritePartitionTranslationDataNV & setPartitionIndex( uint32_t partitionIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      partitionIndex = partitionIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWritePartitionTranslationDataNV &
      setPartitionTranslation( std::array<float, 3> partitionTranslation_ ) VULKAN_HPP_NOEXCEPT
    {
      partitionTranslation = partitionTranslation_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer (const and non-const).
    operator VkPartitionedAccelerationStructureWritePartitionTranslationDataNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPartitionedAccelerationStructureWritePartitionTranslationDataNV *>( this );
    }

    operator VkPartitionedAccelerationStructureWritePartitionTranslationDataNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPartitionedAccelerationStructureWritePartitionTranslationDataNV *>( this );
    }

    operator VkPartitionedAccelerationStructureWritePartitionTranslationDataNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPartitionedAccelerationStructureWritePartitionTranslationDataNV *>( this );
    }

    operator VkPartitionedAccelerationStructureWritePartitionTranslationDataNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPartitionedAccelerationStructureWritePartitionTranslationDataNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, in declaration order; used for reflection-based comparison.
    std::tuple<uint32_t const &, ArrayWrapper1D<float, 3> const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( partitionIndex, partitionTranslation );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PartitionedAccelerationStructureWritePartitionTranslationDataNV const & ) const = default;
#else
    // Member-wise equality when the defaulted spaceship operator is not available.
    bool operator==( PartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( partitionIndex == rhs.partitionIndex ) && ( partitionTranslation == rhs.partitionTranslation );
#  endif
    }

    bool operator!=( PartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPartitionedAccelerationStructureWritePartitionTranslationDataNV, in the same
    // order; partitionTranslation wraps the C array member float[3].
    uint32_t                 partitionIndex       = {};
    ArrayWrapper1D<float, 3> partitionTranslation = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper for generic code (available under C++20).
  template <>
  struct CppType<VkPartitionedAccelerationStructureWritePartitionTranslationDataNV>
  {
    using Type = PartitionedAccelerationStructureWritePartitionTranslationDataNV;
  };
#endif

  // wrapper struct for struct VkPresentStageTimeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentStageTimeEXT.html
  struct PresentStageTimeEXT
  {
    using NativeType = VkPresentStageTimeEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; both fields default to zero-initialized values.
    VULKAN_HPP_CONSTEXPR PresentStageTimeEXT( PresentStageFlagsEXT stage_ = {}, uint64_t time_ = {} ) VULKAN_HPP_NOEXCEPT
      : stage{ stage_ }
      , time{ time_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PresentStageTimeEXT( PresentStageTimeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-compatible with it.
    PresentStageTimeEXT( VkPresentStageTimeEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PresentStageTimeEXT( *reinterpret_cast<PresentStageTimeEXT const *>( &rhs ) )
    {
    }

    PresentStageTimeEXT & operator=( PresentStageTimeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper.
    PresentStageTimeEXT & operator=( VkPresentStageTimeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PresentStageTimeEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 PresentStageTimeEXT & setStage( PresentStageFlagsEXT stage_ ) VULKAN_HPP_NOEXCEPT
    {
      stage = stage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentStageTimeEXT & setTime( uint64_t time_ ) VULKAN_HPP_NOEXCEPT
    {
      time = time_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer (const and non-const).
    operator VkPresentStageTimeEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPresentStageTimeEXT *>( this );
    }

    operator VkPresentStageTimeEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPresentStageTimeEXT *>( this );
    }

    operator VkPresentStageTimeEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPresentStageTimeEXT *>( this );
    }

    operator VkPresentStageTimeEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPresentStageTimeEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, in declaration order; used for reflection-based comparison.
    std::tuple<PresentStageFlagsEXT const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( stage, time );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PresentStageTimeEXT const & ) const = default;
#else
    // Member-wise equality when the defaulted spaceship operator is not available.
    bool operator==( PresentStageTimeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( stage == rhs.stage ) && ( time == rhs.time );
#  endif
    }

    bool operator!=( PresentStageTimeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPresentStageTimeEXT, in the same order.
    PresentStageFlagsEXT stage = {};
    uint64_t             time  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper for generic code (available under C++20).
  template <>
  struct CppType<VkPresentStageTimeEXT>
  {
    using Type = PresentStageTimeEXT;
  };
#endif

  // wrapper struct for struct VkPastPresentationTimingEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPastPresentationTimingEXT.html
  struct PastPresentationTimingEXT
  {
    using NativeType = VkPastPresentationTimingEXT;

    // sType value this struct must carry; allowDuplicate governs pNext-chain duplication checks.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePastPresentationTimingEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR PastPresentationTimingEXT( uint64_t              presentId_         = {},
                                                    uint64_t              targetTime_        = {},
                                                    uint32_t              presentStageCount_ = {},
                                                    PresentStageTimeEXT * pPresentStages_    = {},
                                                    TimeDomainKHR         timeDomain_        = TimeDomainKHR::eDevice,
                                                    uint64_t              timeDomainId_      = {},
                                                    Bool32                reportComplete_    = {},
                                                    void *                pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , presentId{ presentId_ }
      , targetTime{ targetTime_ }
      , presentStageCount{ presentStageCount_ }
      , pPresentStages{ pPresentStages_ }
      , timeDomain{ timeDomain_ }
      , timeDomainId{ timeDomainId_ }
      , reportComplete{ reportComplete_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PastPresentationTimingEXT( PastPresentationTimingEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-compatible with it.
    PastPresentationTimingEXT( VkPastPresentationTimingEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PastPresentationTimingEXT( *reinterpret_cast<PastPresentationTimingEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: presentStageCount/pPresentStages are derived from the array proxy.
    // Stores presentStages_.data(), so the caller must keep that storage alive while this struct is used.
    PastPresentationTimingEXT( uint64_t                                             presentId_,
                               uint64_t                                             targetTime_,
                               ArrayProxyNoTemporaries<PresentStageTimeEXT> const & presentStages_,
                               TimeDomainKHR                                        timeDomain_     = TimeDomainKHR::eDevice,
                               uint64_t                                             timeDomainId_   = {},
                               Bool32                                               reportComplete_ = {},
                               void *                                               pNext_          = nullptr )
      : pNext( pNext_ )
      , presentId( presentId_ )
      , targetTime( targetTime_ )
      , presentStageCount( static_cast<uint32_t>( presentStages_.size() ) )
      , pPresentStages( presentStages_.data() )
      , timeDomain( timeDomain_ )
      , timeDomainId( timeDomainId_ )
      , reportComplete( reportComplete_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PastPresentationTimingEXT & operator=( PastPresentationTimingEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper.
    PastPresentationTimingEXT & operator=( VkPastPresentationTimingEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PastPresentationTimingEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingEXT & setPresentId( uint64_t presentId_ ) VULKAN_HPP_NOEXCEPT
    {
      presentId = presentId_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingEXT & setTargetTime( uint64_t targetTime_ ) VULKAN_HPP_NOEXCEPT
    {
      targetTime = targetTime_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingEXT & setPresentStageCount( uint32_t presentStageCount_ ) VULKAN_HPP_NOEXCEPT
    {
      presentStageCount = presentStageCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingEXT & setPPresentStages( PresentStageTimeEXT * pPresentStages_ ) VULKAN_HPP_NOEXCEPT
    {
      pPresentStages = pPresentStages_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both presentStageCount and pPresentStages from the array proxy; stores presentStages_.data(),
    // so the caller must keep that storage alive while this struct is used.
    PastPresentationTimingEXT & setPresentStages( ArrayProxyNoTemporaries<PresentStageTimeEXT> const & presentStages_ ) VULKAN_HPP_NOEXCEPT
    {
      presentStageCount = static_cast<uint32_t>( presentStages_.size() );
      pPresentStages    = presentStages_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingEXT & setTimeDomain( TimeDomainKHR timeDomain_ ) VULKAN_HPP_NOEXCEPT
    {
      timeDomain = timeDomain_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingEXT & setTimeDomainId( uint64_t timeDomainId_ ) VULKAN_HPP_NOEXCEPT
    {
      timeDomainId = timeDomainId_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingEXT & setReportComplete( Bool32 reportComplete_ ) VULKAN_HPP_NOEXCEPT
    {
      reportComplete = reportComplete_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer (const and non-const).
    operator VkPastPresentationTimingEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPastPresentationTimingEXT *>( this );
    }

    operator VkPastPresentationTimingEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPastPresentationTimingEXT *>( this );
    }

    operator VkPastPresentationTimingEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPastPresentationTimingEXT *>( this );
    }

    operator VkPastPresentationTimingEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPastPresentationTimingEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, in declaration order; used for reflection-based comparison.
    std::tuple<StructureType const &,
               void * const &,
               uint64_t const &,
               uint64_t const &,
               uint32_t const &,
               PresentStageTimeEXT * const &,
               TimeDomainKHR const &,
               uint64_t const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, presentId, targetTime, presentStageCount, pPresentStages, timeDomain, timeDomainId, reportComplete );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PastPresentationTimingEXT const & ) const = default;
#else
    // Member-wise equality (compares the pPresentStages pointer, not the pointed-to array).
    bool operator==( PastPresentationTimingEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentId == rhs.presentId ) && ( targetTime == rhs.targetTime ) &&
             ( presentStageCount == rhs.presentStageCount ) && ( pPresentStages == rhs.pPresentStages ) && ( timeDomain == rhs.timeDomain ) &&
             ( timeDomainId == rhs.timeDomainId ) && ( reportComplete == rhs.reportComplete );
#  endif
    }

    bool operator!=( PastPresentationTimingEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPastPresentationTimingEXT, in the same order.
    StructureType         sType             = StructureType::ePastPresentationTimingEXT;
    void *                pNext             = {};
    uint64_t              presentId         = {};
    uint64_t              targetTime        = {};
    uint32_t              presentStageCount = {};
    PresentStageTimeEXT * pPresentStages    = {};
    TimeDomainKHR         timeDomain        = TimeDomainKHR::eDevice;
    uint64_t              timeDomainId      = {};
    Bool32                reportComplete    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper for generic code (available under C++20).
  template <>
  struct CppType<VkPastPresentationTimingEXT>
  {
    using Type = PastPresentationTimingEXT;
  };
#endif

  // Maps StructureType::ePastPresentationTimingEXT to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePastPresentationTimingEXT>
  {
    using Type = PastPresentationTimingEXT;
  };

  // wrapper struct for struct VkPastPresentationTimingGOOGLE, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPastPresentationTimingGOOGLE.html
  struct PastPresentationTimingGOOGLE
  {
    using NativeType = VkPastPresentationTimingGOOGLE;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Field-wise constructor; every timing value defaults to zero.
    VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( uint32_t presentID_           = {},
                                                       uint64_t desiredPresentTime_  = {},
                                                       uint64_t actualPresentTime_   = {},
                                                       uint64_t earliestPresentTime_ = {},
                                                       uint64_t presentMargin_       = {} ) VULKAN_HPP_NOEXCEPT
      : presentID( presentID_ )
      , desiredPresentTime( desiredPresentTime_ )
      , actualPresentTime( actualPresentTime_ )
      , earliestPresentTime( earliestPresentTime_ )
      , presentMargin( presentMargin_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( PastPresentationTimingGOOGLE const & other ) VULKAN_HPP_NOEXCEPT = default;

    // Adopt the contents of the layout-compatible C struct.
    PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & native ) VULKAN_HPP_NOEXCEPT
      : PastPresentationTimingGOOGLE( *reinterpret_cast<PastPresentationTimingGOOGLE const *>( &native ) )
    {
    }

    PastPresentationTimingGOOGLE & operator=( PastPresentationTimingGOOGLE const & other ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible C struct.
    PastPresentationTimingGOOGLE & operator=( VkPastPresentationTimingGOOGLE const & native ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PastPresentationTimingGOOGLE const *>( &native );
      return *this;
    }

    // Pointer conversions to the C struct.
    operator VkPastPresentationTimingGOOGLE const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPastPresentationTimingGOOGLE *>( this );
    }

    operator VkPastPresentationTimingGOOGLE *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPastPresentationTimingGOOGLE *>( this );
    }

    // Reference conversions to the C struct.
    operator VkPastPresentationTimingGOOGLE const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPastPresentationTimingGOOGLE *>( this );
    }

    operator VkPastPresentationTimingGOOGLE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPastPresentationTimingGOOGLE *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used by the reflection-based comparison below.
    std::tuple<uint32_t const &, uint64_t const &, uint64_t const &, uint64_t const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( presentID, desiredPresentTime, actualPresentTime, earliestPresentTime, presentMargin );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PastPresentationTimingGOOGLE const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( PastPresentationTimingGOOGLE const & other ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return reflect() == other.reflect();
#  else
      return ( presentID == other.presentID ) && ( desiredPresentTime == other.desiredPresentTime ) && ( actualPresentTime == other.actualPresentTime ) &&
             ( earliestPresentTime == other.earliestPresentTime ) && ( presentMargin == other.presentMargin );
#  endif
    }

    bool operator!=( PastPresentationTimingGOOGLE const & other ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == other );
    }
#endif

  public:
    // Member order mirrors VkPastPresentationTimingGOOGLE; the reinterpret_cast conversions above depend on it.
    uint32_t presentID           = {};
    uint64_t desiredPresentTime  = {};
    uint64_t actualPresentTime   = {};
    uint64_t earliestPresentTime = {};
    uint64_t presentMargin       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct to its C++ wrapper type.
  template <>
  struct CppType<VkPastPresentationTimingGOOGLE>
  {
    using Type = PastPresentationTimingGOOGLE;
  };
#endif

  // wrapper struct for struct VkPastPresentationTimingInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPastPresentationTimingInfoEXT.html
  struct PastPresentationTimingInfoEXT
  {
    using NativeType = VkPastPresentationTimingInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePastPresentationTimingInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Field-wise constructor; sType is fixed to ePastPresentationTimingInfoEXT by the member initializer below.
    VULKAN_HPP_CONSTEXPR PastPresentationTimingInfoEXT( PastPresentationTimingFlagsEXT flags_     = {},
                                                        SwapchainKHR                   swapchain_ = {},
                                                        const void *                   pNext_     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , swapchain{ swapchain_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PastPresentationTimingInfoEXT( PastPresentationTimingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because the two types share an identical memory layout.
    PastPresentationTimingInfoEXT( VkPastPresentationTimingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PastPresentationTimingInfoEXT( *reinterpret_cast<PastPresentationTimingInfoEXT const *>( &rhs ) )
    {
    }

    PastPresentationTimingInfoEXT & operator=( PastPresentationTimingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility argument as the converting constructor).
    PastPresentationTimingInfoEXT & operator=( VkPastPresentationTimingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PastPresentationTimingInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingInfoEXT & setFlags( PastPresentationTimingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingInfoEXT & setSwapchain( SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchain = swapchain_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the C struct, by reference and by pointer; they rely on the identical layout of both types.
    operator VkPastPresentationTimingInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPastPresentationTimingInfoEXT *>( this );
    }

    operator VkPastPresentationTimingInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPastPresentationTimingInfoEXT *>( this );
    }

    operator VkPastPresentationTimingInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPastPresentationTimingInfoEXT *>( this );
    }

    operator VkPastPresentationTimingInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPastPresentationTimingInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used by the reflection-based operator== below.
    std::tuple<StructureType const &, const void * const &, PastPresentationTimingFlagsEXT const &, SwapchainKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, swapchain );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PastPresentationTimingInfoEXT const & ) const = default;
#else
    // Member-wise equality; note that pNext is compared as a raw pointer, not deeply.
    bool operator==( PastPresentationTimingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( swapchain == rhs.swapchain );
#  endif
    }

    bool operator!=( PastPresentationTimingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkPastPresentationTimingInfoEXT; the reinterpret_cast conversions above depend on it.
    StructureType                  sType     = StructureType::ePastPresentationTimingInfoEXT;
    const void *                   pNext     = {};
    PastPresentationTimingFlagsEXT flags     = {};
    SwapchainKHR                   swapchain = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct to its C++ wrapper type.
  template <>
  struct CppType<VkPastPresentationTimingInfoEXT>
  {
    using Type = PastPresentationTimingInfoEXT;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePastPresentationTimingInfoEXT>
  {
    using Type = PastPresentationTimingInfoEXT;
  };

  // wrapper struct for struct VkPastPresentationTimingPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPastPresentationTimingPropertiesEXT.html
  struct PastPresentationTimingPropertiesEXT
  {
    using NativeType = VkPastPresentationTimingPropertiesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePastPresentationTimingPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Field-wise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR PastPresentationTimingPropertiesEXT( uint64_t                    timingPropertiesCounter_ = {},
                                                              uint64_t                    timeDomainsCounter_      = {},
                                                              uint32_t                    presentationTimingCount_ = {},
                                                              PastPresentationTimingEXT * pPresentationTimings_    = {},
                                                              void *                      pNext_                   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , timingPropertiesCounter{ timingPropertiesCounter_ }
      , timeDomainsCounter{ timeDomainsCounter_ }
      , presentationTimingCount{ presentationTimingCount_ }
      , pPresentationTimings{ pPresentationTimings_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PastPresentationTimingPropertiesEXT( PastPresentationTimingPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because the two types share an identical memory layout.
    PastPresentationTimingPropertiesEXT( VkPastPresentationTimingPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PastPresentationTimingPropertiesEXT( *reinterpret_cast<PastPresentationTimingPropertiesEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives presentationTimingCount and pPresentationTimings from a single array proxy.
    // NOTE(review): only the pointer/size are stored; the caller must keep the backing array alive while this struct is used.
    PastPresentationTimingPropertiesEXT( uint64_t                                                   timingPropertiesCounter_,
                                         uint64_t                                                   timeDomainsCounter_,
                                         ArrayProxyNoTemporaries<PastPresentationTimingEXT> const & presentationTimings_,
                                         void *                                                     pNext_ = nullptr )
      : pNext( pNext_ )
      , timingPropertiesCounter( timingPropertiesCounter_ )
      , timeDomainsCounter( timeDomainsCounter_ )
      , presentationTimingCount( static_cast<uint32_t>( presentationTimings_.size() ) )
      , pPresentationTimings( presentationTimings_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PastPresentationTimingPropertiesEXT & operator=( PastPresentationTimingPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility argument as the converting constructor).
    PastPresentationTimingPropertiesEXT & operator=( VkPastPresentationTimingPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PastPresentationTimingPropertiesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingPropertiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingPropertiesEXT & setTimingPropertiesCounter( uint64_t timingPropertiesCounter_ ) VULKAN_HPP_NOEXCEPT
    {
      timingPropertiesCounter = timingPropertiesCounter_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingPropertiesEXT & setTimeDomainsCounter( uint64_t timeDomainsCounter_ ) VULKAN_HPP_NOEXCEPT
    {
      timeDomainsCounter = timeDomainsCounter_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingPropertiesEXT & setPresentationTimingCount( uint32_t presentationTimingCount_ ) VULKAN_HPP_NOEXCEPT
    {
      presentationTimingCount = presentationTimingCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingPropertiesEXT &
      setPPresentationTimings( PastPresentationTimingEXT * pPresentationTimings_ ) VULKAN_HPP_NOEXCEPT
    {
      pPresentationTimings = pPresentationTimings_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets both the count and the pointer from one array proxy.
    PastPresentationTimingPropertiesEXT &
      setPresentationTimings( ArrayProxyNoTemporaries<PastPresentationTimingEXT> const & presentationTimings_ ) VULKAN_HPP_NOEXCEPT
    {
      presentationTimingCount = static_cast<uint32_t>( presentationTimings_.size() );
      pPresentationTimings    = presentationTimings_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the C struct, by reference and by pointer; they rely on the identical layout of both types.
    operator VkPastPresentationTimingPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPastPresentationTimingPropertiesEXT *>( this );
    }

    operator VkPastPresentationTimingPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPastPresentationTimingPropertiesEXT *>( this );
    }

    operator VkPastPresentationTimingPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPastPresentationTimingPropertiesEXT *>( this );
    }

    operator VkPastPresentationTimingPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPastPresentationTimingPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used by the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, uint64_t const &, uint64_t const &, uint32_t const &, PastPresentationTimingEXT * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, timingPropertiesCounter, timeDomainsCounter, presentationTimingCount, pPresentationTimings );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PastPresentationTimingPropertiesEXT const & ) const = default;
#else
    // Member-wise equality; pPresentationTimings is compared as a raw pointer, not element-wise.
    bool operator==( PastPresentationTimingPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timingPropertiesCounter == rhs.timingPropertiesCounter ) &&
             ( timeDomainsCounter == rhs.timeDomainsCounter ) && ( presentationTimingCount == rhs.presentationTimingCount ) &&
             ( pPresentationTimings == rhs.pPresentationTimings );
#  endif
    }

    bool operator!=( PastPresentationTimingPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkPastPresentationTimingPropertiesEXT; the reinterpret_cast conversions above depend on it.
    StructureType               sType                   = StructureType::ePastPresentationTimingPropertiesEXT;
    void *                      pNext                   = {};
    uint64_t                    timingPropertiesCounter = {};
    uint64_t                    timeDomainsCounter      = {};
    uint32_t                    presentationTimingCount = {};
    PastPresentationTimingEXT * pPresentationTimings    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct to its C++ wrapper type.
  template <>
  struct CppType<VkPastPresentationTimingPropertiesEXT>
  {
    using Type = PastPresentationTimingPropertiesEXT;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePastPresentationTimingPropertiesEXT>
  {
    using Type = PastPresentationTimingPropertiesEXT;
  };

  // wrapper struct for struct VkPerTileBeginInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerTileBeginInfoQCOM.html
  struct PerTileBeginInfoQCOM
  {
    using NativeType = VkPerTileBeginInfoQCOM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePerTileBeginInfoQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // The struct carries no data besides the mandatory sType/pNext pair.
    VULKAN_HPP_CONSTEXPR PerTileBeginInfoQCOM( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext( pNext_ ) {}

    VULKAN_HPP_CONSTEXPR PerTileBeginInfoQCOM( PerTileBeginInfoQCOM const & other ) VULKAN_HPP_NOEXCEPT = default;

    // Adopt the contents of the layout-compatible C struct.
    PerTileBeginInfoQCOM( VkPerTileBeginInfoQCOM const & native ) VULKAN_HPP_NOEXCEPT
      : PerTileBeginInfoQCOM( *reinterpret_cast<PerTileBeginInfoQCOM const *>( &native ) )
    {
    }

    PerTileBeginInfoQCOM & operator=( PerTileBeginInfoQCOM const & other ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible C struct.
    PerTileBeginInfoQCOM & operator=( VkPerTileBeginInfoQCOM const & native ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PerTileBeginInfoQCOM const *>( &native );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setter: assigns pNext and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PerTileBeginInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Pointer conversions to the C struct.
    operator VkPerTileBeginInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPerTileBeginInfoQCOM *>( this );
    }

    operator VkPerTileBeginInfoQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPerTileBeginInfoQCOM *>( this );
    }

    // Reference conversions to the C struct.
    operator VkPerTileBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPerTileBeginInfoQCOM *>( this );
    }

    operator VkPerTileBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPerTileBeginInfoQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used by the reflection-based comparison below.
    std::tuple<StructureType const &, const void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PerTileBeginInfoQCOM const & ) const = default;
#else
    bool operator==( PerTileBeginInfoQCOM const & other ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return reflect() == other.reflect();
#  else
      return ( sType == other.sType ) && ( pNext == other.pNext );
#  endif
    }

    bool operator!=( PerTileBeginInfoQCOM const & other ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == other );
    }
#endif

  public:
    StructureType sType = StructureType::ePerTileBeginInfoQCOM;
    const void *  pNext = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct to its C++ wrapper type.
  template <>
  struct CppType<VkPerTileBeginInfoQCOM>
  {
    using Type = PerTileBeginInfoQCOM;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePerTileBeginInfoQCOM>
  {
    using Type = PerTileBeginInfoQCOM;
  };

  // wrapper struct for struct VkPerTileEndInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerTileEndInfoQCOM.html
  struct PerTileEndInfoQCOM
  {
    using NativeType = VkPerTileEndInfoQCOM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePerTileEndInfoQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // The struct carries no data besides the mandatory sType/pNext pair.
    VULKAN_HPP_CONSTEXPR PerTileEndInfoQCOM( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext( pNext_ ) {}

    VULKAN_HPP_CONSTEXPR PerTileEndInfoQCOM( PerTileEndInfoQCOM const & other ) VULKAN_HPP_NOEXCEPT = default;

    // Adopt the contents of the layout-compatible C struct.
    PerTileEndInfoQCOM( VkPerTileEndInfoQCOM const & native ) VULKAN_HPP_NOEXCEPT
      : PerTileEndInfoQCOM( *reinterpret_cast<PerTileEndInfoQCOM const *>( &native ) )
    {
    }

    PerTileEndInfoQCOM & operator=( PerTileEndInfoQCOM const & other ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible C struct.
    PerTileEndInfoQCOM & operator=( VkPerTileEndInfoQCOM const & native ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PerTileEndInfoQCOM const *>( &native );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setter: assigns pNext and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PerTileEndInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Pointer conversions to the C struct.
    operator VkPerTileEndInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPerTileEndInfoQCOM *>( this );
    }

    operator VkPerTileEndInfoQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPerTileEndInfoQCOM *>( this );
    }

    // Reference conversions to the C struct.
    operator VkPerTileEndInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPerTileEndInfoQCOM *>( this );
    }

    operator VkPerTileEndInfoQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPerTileEndInfoQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used by the reflection-based comparison below.
    std::tuple<StructureType const &, const void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PerTileEndInfoQCOM const & ) const = default;
#else
    bool operator==( PerTileEndInfoQCOM const & other ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return reflect() == other.reflect();
#  else
      return ( sType == other.sType ) && ( pNext == other.pNext );
#  endif
    }

    bool operator!=( PerTileEndInfoQCOM const & other ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == other );
    }
#endif

  public:
    StructureType sType = StructureType::ePerTileEndInfoQCOM;
    const void *  pNext = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct to its C++ wrapper type.
  template <>
  struct CppType<VkPerTileEndInfoQCOM>
  {
    using Type = PerTileEndInfoQCOM;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePerTileEndInfoQCOM>
  {
    using Type = PerTileEndInfoQCOM;
  };

  // wrapper struct for struct VkPerformanceConfigurationAcquireInfoINTEL, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceConfigurationAcquireInfoINTEL.html
  struct PerformanceConfigurationAcquireInfoINTEL
  {
    using NativeType = VkPerformanceConfigurationAcquireInfoINTEL;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePerformanceConfigurationAcquireInfoINTEL;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Field-wise constructor; type defaults to the only configuration type with value 0 in the registry default,
    // and sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL(
      PerformanceConfigurationTypeINTEL type_  = PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated,
      const void *                      pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , type{ type_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because the two types share an identical memory layout.
    PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
      : PerformanceConfigurationAcquireInfoINTEL( *reinterpret_cast<PerformanceConfigurationAcquireInfoINTEL const *>( &rhs ) )
    {
    }

    PerformanceConfigurationAcquireInfoINTEL & operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility argument as the converting constructor).
    PerformanceConfigurationAcquireInfoINTEL & operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PerformanceConfigurationAcquireInfoINTEL const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & setType( PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the C struct, by reference and by pointer; they rely on the identical layout of both types.
    operator VkPerformanceConfigurationAcquireInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( this );
    }

    operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL *>( this );
    }

    operator VkPerformanceConfigurationAcquireInfoINTEL const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( this );
    }

    operator VkPerformanceConfigurationAcquireInfoINTEL *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used by the reflection-based operator== below.
    std::tuple<StructureType const &, const void * const &, PerformanceConfigurationTypeINTEL const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, type );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PerformanceConfigurationAcquireInfoINTEL const & ) const = default;
#else
    // Member-wise equality; note that pNext is compared as a raw pointer, not deeply.
    bool operator==( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type );
#  endif
    }

    bool operator!=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkPerformanceConfigurationAcquireInfoINTEL; the reinterpret_cast conversions above depend on it.
    StructureType                     sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
    const void *                      pNext = {};
    PerformanceConfigurationTypeINTEL type  = PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct to its C++ wrapper type.
  template <>
  struct CppType<VkPerformanceConfigurationAcquireInfoINTEL>
  {
    using Type = PerformanceConfigurationAcquireInfoINTEL;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePerformanceConfigurationAcquireInfoINTEL>
  {
    using Type = PerformanceConfigurationAcquireInfoINTEL;
  };

  // wrapper struct for struct VkPerformanceCounterARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceCounterARM.html
  struct PerformanceCounterARM
  {
    using NativeType = VkPerformanceCounterARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePerformanceCounterARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Field-wise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR PerformanceCounterARM( uint32_t counterID_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , counterID( counterID_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PerformanceCounterARM( PerformanceCounterARM const & other ) VULKAN_HPP_NOEXCEPT = default;

    // Adopt the contents of the layout-compatible C struct.
    PerformanceCounterARM( VkPerformanceCounterARM const & native ) VULKAN_HPP_NOEXCEPT
      : PerformanceCounterARM( *reinterpret_cast<PerformanceCounterARM const *>( &native ) )
    {
    }

    PerformanceCounterARM & operator=( PerformanceCounterARM const & other ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible C struct.
    PerformanceCounterARM & operator=( VkPerformanceCounterARM const & native ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PerformanceCounterARM const *>( &native );
      return *this;
    }

    // Pointer conversions to the C struct.
    operator VkPerformanceCounterARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPerformanceCounterARM *>( this );
    }

    operator VkPerformanceCounterARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPerformanceCounterARM *>( this );
    }

    // Reference conversions to the C struct.
    operator VkPerformanceCounterARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPerformanceCounterARM *>( this );
    }

    operator VkPerformanceCounterARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPerformanceCounterARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used by the reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, counterID );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PerformanceCounterARM const & ) const = default;
#else
    bool operator==( PerformanceCounterARM const & other ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return reflect() == other.reflect();
#  else
      return ( sType == other.sType ) && ( pNext == other.pNext ) && ( counterID == other.counterID );
#  endif
    }

    bool operator!=( PerformanceCounterARM const & other ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == other );
    }
#endif

  public:
    // Member order mirrors VkPerformanceCounterARM; the reinterpret_cast conversions above depend on it.
    StructureType sType     = StructureType::ePerformanceCounterARM;
    void *        pNext     = {};
    uint32_t      counterID = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct to its C++ wrapper type.
  template <>
  struct CppType<VkPerformanceCounterARM>
  {
    using Type = PerformanceCounterARM;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePerformanceCounterARM>
  {
    using Type = PerformanceCounterARM;
  };

  // wrapper struct for struct VkPerformanceCounterDescriptionARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceCounterDescriptionARM.html
  struct PerformanceCounterDescriptionARM
  {
    using NativeType = VkPerformanceCounterDescriptionARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePerformanceCounterDescriptionARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer, so only the payload
    // members and pNext are parameters (pNext_ last so the payload can be passed positionally).
    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionARM( PerformanceCounterDescriptionFlagsARM             flags_ = {},
                                                              std::array<char, VK_MAX_DESCRIPTION_SIZE> const & name_  = {},
                                                              void *                                            pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , name{ name_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionARM( PerformanceCounterDescriptionARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; the reinterpret_cast is only valid because this wrapper
    // mirrors VkPerformanceCounterDescriptionARM's memory layout exactly.
    PerformanceCounterDescriptionARM( VkPerformanceCounterDescriptionARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PerformanceCounterDescriptionARM( *reinterpret_cast<PerformanceCounterDescriptionARM const *>( &rhs ) )
    {
    }

    PerformanceCounterDescriptionARM & operator=( PerformanceCounterDescriptionARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible, see above).
    PerformanceCounterDescriptionARM & operator=( VkPerformanceCounterDescriptionARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PerformanceCounterDescriptionARM const *>( &rhs );
      return *this;
    }

    // Reinterpreting conversions to the C struct, so the wrapper can be passed directly to the C API.
    operator VkPerformanceCounterDescriptionARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPerformanceCounterDescriptionARM *>( this );
    }

    operator VkPerformanceCounterDescriptionARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPerformanceCounterDescriptionARM *>( this );
    }

    operator VkPerformanceCounterDescriptionARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPerformanceCounterDescriptionARM *>( this );
    }

    operator VkPerformanceCounterDescriptionARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPerformanceCounterDescriptionARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: references to all members in declaration order.
    std::tuple<StructureType const &, void * const &, PerformanceCounterDescriptionFlagsARM const &, ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, name );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written (not defaulted) three-way comparison: the 'name' char array must be compared
    // as a C string via strcmp (ArrayWrapper1D converts to char const * here), which a defaulted
    // operator<=> would not do.
    std::strong_ordering operator<=>( PerformanceCounterDescriptionARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
        return cmp;
      if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#endif

    // Equality likewise compares 'name' with strcmp rather than byte-wise.
    bool operator==( PerformanceCounterDescriptionARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( strcmp( name, rhs.name ) == 0 );
    }

    bool operator!=( PerformanceCounterDescriptionARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Members mirror VkPerformanceCounterDescriptionARM; order and types must not change, or the
    // reinterpret_casts above become invalid.
    StructureType                                 sType = StructureType::ePerformanceCounterDescriptionARM;
    void *                                        pNext = {};
    PerformanceCounterDescriptionFlagsARM         flags = {};
    ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkPerformanceCounterDescriptionARM to its C++ wrapper.
  template <>
  struct CppType<VkPerformanceCounterDescriptionARM>
  {
    using Type = PerformanceCounterDescriptionARM;
  };
#endif

  // Maps StructureType::ePerformanceCounterDescriptionARM back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePerformanceCounterDescriptionARM>
  {
    using Type = PerformanceCounterDescriptionARM;
  };

  // wrapper struct for struct VkPerformanceCounterDescriptionKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceCounterDescriptionKHR.html
  struct PerformanceCounterDescriptionKHR
  {
    using NativeType = VkPerformanceCounterDescriptionKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePerformanceCounterDescriptionKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer, pNext_ comes last so the
    // payload members can be passed positionally.
    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( PerformanceCounterDescriptionFlagsKHR             flags_       = {},
                                                              std::array<char, VK_MAX_DESCRIPTION_SIZE> const & name_        = {},
                                                              std::array<char, VK_MAX_DESCRIPTION_SIZE> const & category_    = {},
                                                              std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
                                                              void *                                            pNext_       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , name{ name_ }
      , category{ category_ }
      , description{ description_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid only because wrapper and C struct share an identical layout.
    PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PerformanceCounterDescriptionKHR( *reinterpret_cast<PerformanceCounterDescriptionKHR const *>( &rhs ) )
    {
    }

    PerformanceCounterDescriptionKHR & operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible).
    PerformanceCounterDescriptionKHR & operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PerformanceCounterDescriptionKHR const *>( &rhs );
      return *this;
    }

    // Reinterpreting conversions so the wrapper can be handed straight to the C API.
    operator VkPerformanceCounterDescriptionKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPerformanceCounterDescriptionKHR *>( this );
    }

    operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( this );
    }

    operator VkPerformanceCounterDescriptionKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPerformanceCounterDescriptionKHR *>( this );
    }

    operator VkPerformanceCounterDescriptionKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: references to all members in declaration order.
    std::tuple<StructureType const &,
               void * const &,
               PerformanceCounterDescriptionFlagsKHR const &,
               ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &,
               ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &,
               ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, name, category, description );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written (not defaulted) three-way comparison: the char-array members are compared as
    // C strings via strcmp (ArrayWrapper1D converts to char const * here), which a defaulted
    // operator<=> would not do.
    std::strong_ordering operator<=>( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
        return cmp;
      if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = strcmp( category, rhs.category ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#endif

    // Equality likewise compares the char-array members with strcmp rather than byte-wise.
    bool operator==( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( strcmp( name, rhs.name ) == 0 ) &&
             ( strcmp( category, rhs.category ) == 0 ) && ( strcmp( description, rhs.description ) == 0 );
    }

    bool operator!=( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Members mirror VkPerformanceCounterDescriptionKHR; order and types must not change, or the
    // reinterpret_casts above become invalid.
    StructureType                                 sType       = StructureType::ePerformanceCounterDescriptionKHR;
    void *                                        pNext       = {};
    PerformanceCounterDescriptionFlagsKHR         flags       = {};
    ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name        = {};
    ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> category    = {};
    ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkPerformanceCounterDescriptionKHR to its C++ wrapper.
  template <>
  struct CppType<VkPerformanceCounterDescriptionKHR>
  {
    using Type = PerformanceCounterDescriptionKHR;
  };
#endif

  // Maps StructureType::ePerformanceCounterDescriptionKHR back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePerformanceCounterDescriptionKHR>
  {
    using Type = PerformanceCounterDescriptionKHR;
  };

  // wrapper struct for struct VkPerformanceCounterKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceCounterKHR.html
  struct PerformanceCounterKHR
  {
    using NativeType = VkPerformanceCounterKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePerformanceCounterKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer, pNext_ comes last so the
    // payload members can be passed positionally.
    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( PerformanceCounterUnitKHR                 unit_    = PerformanceCounterUnitKHR::eGeneric,
                                                   PerformanceCounterScopeKHR                scope_   = PerformanceCounterScopeKHR::eCommandBuffer,
                                                   PerformanceCounterStorageKHR              storage_ = PerformanceCounterStorageKHR::eInt32,
                                                   std::array<uint8_t, VK_UUID_SIZE> const & uuid_    = {},
                                                   void *                                    pNext_   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , unit{ unit_ }
      , scope{ scope_ }
      , storage{ storage_ }
      , uuid{ uuid_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid only because wrapper and C struct share an identical layout.
    PerformanceCounterKHR( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PerformanceCounterKHR( *reinterpret_cast<PerformanceCounterKHR const *>( &rhs ) )
    {
    }

    PerformanceCounterKHR & operator=( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible).
    PerformanceCounterKHR & operator=( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PerformanceCounterKHR const *>( &rhs );
      return *this;
    }

    // Reinterpreting conversions so the wrapper can be handed straight to the C API.
    operator VkPerformanceCounterKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPerformanceCounterKHR *>( this );
    }

    operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPerformanceCounterKHR *>( this );
    }

    operator VkPerformanceCounterKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPerformanceCounterKHR *>( this );
    }

    operator VkPerformanceCounterKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPerformanceCounterKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: references to all members in declaration order.
    std::tuple<StructureType const &,
               void * const &,
               PerformanceCounterUnitKHR const &,
               PerformanceCounterScopeKHR const &,
               PerformanceCounterStorageKHR const &,
               ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, unit, scope, storage, uuid );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Member-wise comparison is sufficient here (no C-string members), so <=> can be defaulted.
    auto operator<=>( PerformanceCounterKHR const & ) const = default;
#else
    bool operator==( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( unit == rhs.unit ) && ( scope == rhs.scope ) && ( storage == rhs.storage ) &&
             ( uuid == rhs.uuid );
#  endif
    }

    bool operator!=( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPerformanceCounterKHR; order and types must not change, or the
    // reinterpret_casts above become invalid.
    StructureType                         sType   = StructureType::ePerformanceCounterKHR;
    void *                                pNext   = {};
    PerformanceCounterUnitKHR             unit    = PerformanceCounterUnitKHR::eGeneric;
    PerformanceCounterScopeKHR            scope   = PerformanceCounterScopeKHR::eCommandBuffer;
    PerformanceCounterStorageKHR          storage = PerformanceCounterStorageKHR::eInt32;
    ArrayWrapper1D<uint8_t, VK_UUID_SIZE> uuid    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkPerformanceCounterKHR to its C++ wrapper.
  template <>
  struct CppType<VkPerformanceCounterKHR>
  {
    using Type = PerformanceCounterKHR;
  };
#endif

  // Maps StructureType::ePerformanceCounterKHR back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePerformanceCounterKHR>
  {
    using Type = PerformanceCounterKHR;
  };

  // Wrapper union for VkPerformanceCounterResultKHR: holds one counter result, whose active
  // member is determined externally by the counter's PerformanceCounterStorageKHR.
  union PerformanceCounterResultKHR
  {
    using NativeType = VkPerformanceCounterResultKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )

    // One constructor per distinct member type; only the int32 overload is defaulted, making it
    // the default constructor as well.
    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( int32_t int32_ = {} ) : int32( int32_ ) {}

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( int64_t int64_ ) : int64( int64_ ) {}

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( uint32_t uint32_ ) : uint32( uint32_ ) {}

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( uint64_t uint64_ ) : uint64( uint64_ ) {}

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( float float32_ ) : float32( float32_ ) {}

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( double float64_ ) : float64( float64_ ) {}
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
    // Fluent setters: each makes the named member the active one and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setInt32( int32_t int32_ ) VULKAN_HPP_NOEXCEPT
    {
      int32 = int32_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setInt64( int64_t int64_ ) VULKAN_HPP_NOEXCEPT
    {
      int64 = int64_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setUint32( uint32_t uint32_ ) VULKAN_HPP_NOEXCEPT
    {
      uint32 = uint32_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setUint64( uint64_t uint64_ ) VULKAN_HPP_NOEXCEPT
    {
      uint64 = uint64_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setFloat32( float float32_ ) VULKAN_HPP_NOEXCEPT
    {
      float32 = float32_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setFloat64( double float64_ ) VULKAN_HPP_NOEXCEPT
    {
      float64 = float64_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to the C union (layout-identical by construction).
    operator VkPerformanceCounterResultKHR const &() const
    {
      return *reinterpret_cast<const VkPerformanceCounterResultKHR *>( this );
    }

    operator VkPerformanceCounterResultKHR &()
    {
      return *reinterpret_cast<VkPerformanceCounterResultKHR *>( this );
    }

    int32_t  int32;
    int64_t  int64;
    uint32_t uint32;
    uint64_t uint64;
    float    float32;
    double   float64;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C union VkPerformanceCounterResultKHR to its C++ wrapper.
  template <>
  struct CppType<VkPerformanceCounterResultKHR>
  {
    using Type = PerformanceCounterResultKHR;
  };
#endif

  // wrapper struct for struct VkPerformanceMarkerInfoINTEL, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceMarkerInfoINTEL.html
  struct PerformanceMarkerInfoINTEL
  {
    using NativeType = VkPerformanceMarkerInfoINTEL;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePerformanceMarkerInfoINTEL;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( uint64_t marker_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , marker{ marker_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid only because wrapper and C struct share an identical layout.
    PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
      : PerformanceMarkerInfoINTEL( *reinterpret_cast<PerformanceMarkerInfoINTEL const *>( &rhs ) )
    {
    }

    PerformanceMarkerInfoINTEL & operator=( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible).
    PerformanceMarkerInfoINTEL & operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PerformanceMarkerInfoINTEL const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters returning *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) VULKAN_HPP_NOEXCEPT
    {
      marker = marker_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions so the wrapper can be handed straight to the C API.
    operator VkPerformanceMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( this );
    }

    operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPerformanceMarkerInfoINTEL *>( this );
    }

    operator VkPerformanceMarkerInfoINTEL const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( this );
    }

    operator VkPerformanceMarkerInfoINTEL *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPerformanceMarkerInfoINTEL *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: references to all members in declaration order.
    std::tuple<StructureType const &, const void * const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, marker );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PerformanceMarkerInfoINTEL const & ) const = default;
#else
    bool operator==( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( marker == rhs.marker );
#  endif
    }

    bool operator!=( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPerformanceMarkerInfoINTEL; order and types must not change, or the
    // reinterpret_casts above become invalid.
    StructureType sType  = StructureType::ePerformanceMarkerInfoINTEL;
    const void *  pNext  = {};
    uint64_t      marker = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkPerformanceMarkerInfoINTEL to its C++ wrapper.
  template <>
  struct CppType<VkPerformanceMarkerInfoINTEL>
  {
    using Type = PerformanceMarkerInfoINTEL;
  };
#endif

  // Maps StructureType::ePerformanceMarkerInfoINTEL back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePerformanceMarkerInfoINTEL>
  {
    using Type = PerformanceMarkerInfoINTEL;
  };

  // wrapper struct for struct VkPerformanceOverrideInfoINTEL, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceOverrideInfoINTEL.html
  struct PerformanceOverrideInfoINTEL
  {
    using NativeType = VkPerformanceOverrideInfoINTEL;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePerformanceOverrideInfoINTEL;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer, pNext_ comes last so the
    // payload members can be passed positionally.
    VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( PerformanceOverrideTypeINTEL type_      = PerformanceOverrideTypeINTEL::eNullHardware,
                                                       Bool32                       enable_    = {},
                                                       uint64_t                     parameter_ = {},
                                                       const void *                 pNext_     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , type{ type_ }
      , enable{ enable_ }
      , parameter{ parameter_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid only because wrapper and C struct share an identical layout.
    PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
      : PerformanceOverrideInfoINTEL( *reinterpret_cast<PerformanceOverrideInfoINTEL const *>( &rhs ) )
    {
    }

    PerformanceOverrideInfoINTEL & operator=( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible).
    PerformanceOverrideInfoINTEL & operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PerformanceOverrideInfoINTEL const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters returning *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setType( PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setEnable( Bool32 enable_ ) VULKAN_HPP_NOEXCEPT
    {
      enable = enable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) VULKAN_HPP_NOEXCEPT
    {
      parameter = parameter_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions so the wrapper can be handed straight to the C API.
    operator VkPerformanceOverrideInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( this );
    }

    operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPerformanceOverrideInfoINTEL *>( this );
    }

    operator VkPerformanceOverrideInfoINTEL const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( this );
    }

    operator VkPerformanceOverrideInfoINTEL *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPerformanceOverrideInfoINTEL *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: references to all members in declaration order.
    std::tuple<StructureType const &, const void * const &, PerformanceOverrideTypeINTEL const &, Bool32 const &, uint64_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, type, enable, parameter );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PerformanceOverrideInfoINTEL const & ) const = default;
#else
    bool operator==( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( enable == rhs.enable ) && ( parameter == rhs.parameter );
#  endif
    }

    bool operator!=( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPerformanceOverrideInfoINTEL; order and types must not change, or the
    // reinterpret_casts above become invalid.
    StructureType                sType     = StructureType::ePerformanceOverrideInfoINTEL;
    const void *                 pNext     = {};
    PerformanceOverrideTypeINTEL type      = PerformanceOverrideTypeINTEL::eNullHardware;
    Bool32                       enable    = {};
    uint64_t                     parameter = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkPerformanceOverrideInfoINTEL to its C++ wrapper.
  template <>
  struct CppType<VkPerformanceOverrideInfoINTEL>
  {
    using Type = PerformanceOverrideInfoINTEL;
  };
#endif

  // Maps StructureType::ePerformanceOverrideInfoINTEL back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePerformanceOverrideInfoINTEL>
  {
    using Type = PerformanceOverrideInfoINTEL;
  };

  // wrapper struct for struct VkPerformanceQuerySubmitInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceQuerySubmitInfoKHR.html
  struct PerformanceQuerySubmitInfoKHR
  {
    using NativeType = VkPerformanceQuerySubmitInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePerformanceQuerySubmitInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( uint32_t counterPassIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , counterPassIndex{ counterPassIndex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid only because wrapper and C struct share an identical layout.
    PerformanceQuerySubmitInfoKHR( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PerformanceQuerySubmitInfoKHR( *reinterpret_cast<PerformanceQuerySubmitInfoKHR const *>( &rhs ) )
    {
    }

    PerformanceQuerySubmitInfoKHR & operator=( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible).
    PerformanceQuerySubmitInfoKHR & operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PerformanceQuerySubmitInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters returning *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR & setCounterPassIndex( uint32_t counterPassIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      counterPassIndex = counterPassIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions so the wrapper can be handed straight to the C API.
    operator VkPerformanceQuerySubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPerformanceQuerySubmitInfoKHR *>( this );
    }

    operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPerformanceQuerySubmitInfoKHR *>( this );
    }

    operator VkPerformanceQuerySubmitInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPerformanceQuerySubmitInfoKHR *>( this );
    }

    operator VkPerformanceQuerySubmitInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPerformanceQuerySubmitInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: references to all members in declaration order.
    std::tuple<StructureType const &, const void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, counterPassIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PerformanceQuerySubmitInfoKHR const & ) const = default;
#else
    bool operator==( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( counterPassIndex == rhs.counterPassIndex );
#  endif
    }

    bool operator!=( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPerformanceQuerySubmitInfoKHR; order and types must not change, or the
    // reinterpret_casts above become invalid.
    StructureType sType            = StructureType::ePerformanceQuerySubmitInfoKHR;
    const void *  pNext            = {};
    uint32_t      counterPassIndex = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkPerformanceQuerySubmitInfoKHR to its C++ wrapper.
  template <>
  struct CppType<VkPerformanceQuerySubmitInfoKHR>
  {
    using Type = PerformanceQuerySubmitInfoKHR;
  };
#endif

  // Maps StructureType::ePerformanceQuerySubmitInfoKHR back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePerformanceQuerySubmitInfoKHR>
  {
    using Type = PerformanceQuerySubmitInfoKHR;
  };

  // wrapper struct for struct VkPerformanceStreamMarkerInfoINTEL, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceStreamMarkerInfoINTEL.html
  struct PerformanceStreamMarkerInfoINTEL
  {
    using NativeType = VkPerformanceStreamMarkerInfoINTEL;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePerformanceStreamMarkerInfoINTEL;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( uint32_t marker_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , marker{ marker_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid only because wrapper and C struct share an identical layout.
    PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
      : PerformanceStreamMarkerInfoINTEL( *reinterpret_cast<PerformanceStreamMarkerInfoINTEL const *>( &rhs ) )
    {
    }

    PerformanceStreamMarkerInfoINTEL & operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible).
    PerformanceStreamMarkerInfoINTEL & operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PerformanceStreamMarkerInfoINTEL const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters returning *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) VULKAN_HPP_NOEXCEPT
    {
      marker = marker_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions so the wrapper can be handed straight to the C API.
    operator VkPerformanceStreamMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( this );
    }

    operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL *>( this );
    }

    operator VkPerformanceStreamMarkerInfoINTEL const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( this );
    }

    operator VkPerformanceStreamMarkerInfoINTEL *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: references to all members in declaration order.
    std::tuple<StructureType const &, const void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, marker );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PerformanceStreamMarkerInfoINTEL const & ) const = default;
#else
    bool operator==( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( marker == rhs.marker );
#  endif
    }

    bool operator!=( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPerformanceStreamMarkerInfoINTEL; order and types must not change, or the
    // reinterpret_casts above become invalid.
    StructureType sType  = StructureType::ePerformanceStreamMarkerInfoINTEL;
    const void *  pNext  = {};
    uint32_t      marker = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkPerformanceStreamMarkerInfoINTEL to its C++ wrapper.
  template <>
  struct CppType<VkPerformanceStreamMarkerInfoINTEL>
  {
    using Type = PerformanceStreamMarkerInfoINTEL;
  };
#endif

  // Maps StructureType::ePerformanceStreamMarkerInfoINTEL back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePerformanceStreamMarkerInfoINTEL>
  {
    using Type = PerformanceStreamMarkerInfoINTEL;
  };

  // Wrapper union for VkPerformanceValueDataINTEL: holds one performance value, whose active
  // member is determined externally (by the accompanying PerformanceValueTypeINTEL).
  union PerformanceValueDataINTEL
  {
    using NativeType = VkPerformanceValueDataINTEL;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )

    // One constructor per distinct member type; only the value32 overload is defaulted, making it
    // the default constructor as well. Note there is no Bool32 constructor — presumably it would
    // be ambiguous with the uint32_t one; use setValueBool instead.
    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( uint32_t value32_ = {} ) : value32( value32_ ) {}

    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( uint64_t value64_ ) : value64( value64_ ) {}

    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( float valueFloat_ ) : valueFloat( valueFloat_ ) {}

    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( const char * valueString_ ) : valueString( valueString_ ) {}
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
    // Fluent setters: each makes the named member the active one and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValue32( uint32_t value32_ ) VULKAN_HPP_NOEXCEPT
    {
      value32 = value32_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValue64( uint64_t value64_ ) VULKAN_HPP_NOEXCEPT
    {
      value64 = value64_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueFloat( float valueFloat_ ) VULKAN_HPP_NOEXCEPT
    {
      valueFloat = valueFloat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueBool( Bool32 valueBool_ ) VULKAN_HPP_NOEXCEPT
    {
      valueBool = valueBool_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueString( const char * valueString_ ) VULKAN_HPP_NOEXCEPT
    {
      valueString = valueString_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to the C union (layout-identical by construction).
    operator VkPerformanceValueDataINTEL const &() const
    {
      return *reinterpret_cast<const VkPerformanceValueDataINTEL *>( this );
    }

    operator VkPerformanceValueDataINTEL &()
    {
      return *reinterpret_cast<VkPerformanceValueDataINTEL *>( this );
    }

    // The two member lists differ only in Bool32 (C++ wrapper type) vs VkBool32 (raw C type),
    // depending on whether non-trivial types are allowed in unions.
#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
    uint32_t     value32;
    uint64_t     value64;
    float        valueFloat;
    Bool32       valueBool;
    const char * valueString;
#else
    uint32_t     value32;
    uint64_t     value64;
    float        valueFloat;
    VkBool32     valueBool;
    const char * valueString;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C union type to its C++ wrapper type (only provided when compiling as C++20 or newer).
  template <>
  struct CppType<VkPerformanceValueDataINTEL>
  {
    using Type = PerformanceValueDataINTEL;
  };
#endif

  // wrapper struct for struct VkPerformanceValueINTEL, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceValueINTEL.html
  struct PerformanceValueINTEL
  {
    // C struct this wrapper interconverts with via reinterpret_cast (member layout must match).
    using NativeType = VkPerformanceValueINTEL;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // 'type' is the discriminant telling which member of 'data' is active.
    VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL( PerformanceValueTypeINTEL type_ = PerformanceValueTypeINTEL::eUint32,
                                                   PerformanceValueDataINTEL data_ = {} ) VULKAN_HPP_NOEXCEPT
      : type{ type_ }
      , data{ data_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the wrapper type.
    PerformanceValueINTEL( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
      : PerformanceValueINTEL( *reinterpret_cast<PerformanceValueINTEL const *>( &rhs ) )
    {
    }

    PerformanceValueINTEL & operator=( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct by reinterpreting it as the wrapper type.
    PerformanceValueINTEL & operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PerformanceValueINTEL const *>( &rhs );
      return *this;
    }

    // Implicit reference and pointer conversions to the C type, relying on identical layout.
    operator VkPerformanceValueINTEL const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPerformanceValueINTEL *>( this );
    }

    operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPerformanceValueINTEL *>( this );
    }

    operator VkPerformanceValueINTEL const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPerformanceValueINTEL *>( this );
    }

    operator VkPerformanceValueINTEL *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPerformanceValueINTEL *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members.
    // NOTE(review): unlike the sibling wrappers, no operator== is provided here —
    // presumably because 'data' is a union and cannot be compared member-wise.
    std::tuple<PerformanceValueTypeINTEL const &, PerformanceValueDataINTEL const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( type, data );
    }
#endif

  public:
    PerformanceValueTypeINTEL type = PerformanceValueTypeINTEL::eUint32;
    PerformanceValueDataINTEL data = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper type (only provided when compiling as C++20 or newer).
  template <>
  struct CppType<VkPerformanceValueINTEL>
  {
    using Type = PerformanceValueINTEL;
  };
#endif

  // wrapper struct for struct VkPhysicalDevice16BitStorageFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevice16BitStorageFeatures.html
  struct PhysicalDevice16BitStorageFeatures
  {
    // C struct this wrapper interconverts with via reinterpret_cast (member layout must match).
    using NativeType = VkPhysicalDevice16BitStorageFeatures;

    // NOTE(review): allowDuplicate/structureType appear to be traits consumed by generic
    // pNext-chain machinery elsewhere in the file — they are not used within this block.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevice16BitStorageFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is fixed by its member initializer below; only pNext and the feature flags are settable here.
    VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( Bool32 storageBuffer16BitAccess_           = {},
                                                             Bool32 uniformAndStorageBuffer16BitAccess_ = {},
                                                             Bool32 storagePushConstant16_              = {},
                                                             Bool32 storageInputOutput16_               = {},
                                                             void * pNext_                              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , storageBuffer16BitAccess{ storageBuffer16BitAccess_ }
      , uniformAndStorageBuffer16BitAccess{ uniformAndStorageBuffer16BitAccess_ }
      , storagePushConstant16{ storagePushConstant16_ }
      , storageInputOutput16{ storageInputOutput16_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the wrapper type.
    PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevice16BitStorageFeatures( *reinterpret_cast<PhysicalDevice16BitStorageFeatures const *>( &rhs ) )
    {
    }

    PhysicalDevice16BitStorageFeatures & operator=( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct by reinterpreting it as the wrapper type.
    PhysicalDevice16BitStorageFeatures & operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevice16BitStorageFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setStorageBuffer16BitAccess( Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      storageBuffer16BitAccess = storageBuffer16BitAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures &
      setUniformAndStorageBuffer16BitAccess( Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setStoragePushConstant16( Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
    {
      storagePushConstant16 = storagePushConstant16_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setStorageInputOutput16( Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
    {
      storageInputOutput16 = storageInputOutput16_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference and pointer conversions to the C type, relying on identical layout.
    operator VkPhysicalDevice16BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures *>( this );
    }

    operator VkPhysicalDevice16BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevice16BitStorageFeatures *>( this );
    }

    operator VkPhysicalDevice16BitStorageFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures *>( this );
    }

    operator VkPhysicalDevice16BitStorageFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevice16BitStorageFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members; used by operator== when VULKAN_HPP_USE_REFLECT is defined.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, storageBuffer16BitAccess, uniformAndStorageBuffer16BitAccess, storagePushConstant16, storageInputOutput16 );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevice16BitStorageFeatures const & ) const = default;
#else
    // Note: pNext is compared by pointer value only; chained structures are not compared deeply.
    bool operator==( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) &&
             ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) && ( storagePushConstant16 == rhs.storagePushConstant16 ) &&
             ( storageInputOutput16 == rhs.storageInputOutput16 );
#  endif
    }

    bool operator!=( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                              = StructureType::ePhysicalDevice16BitStorageFeatures;
    void *        pNext                              = {};
    Bool32        storageBuffer16BitAccess           = {};
    Bool32        uniformAndStorageBuffer16BitAccess = {};
    Bool32        storagePushConstant16              = {};
    Bool32        storageInputOutput16               = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper type (only provided when compiling as C++20 or newer).
  template <>
  struct CppType<VkPhysicalDevice16BitStorageFeatures>
  {
    using Type = PhysicalDevice16BitStorageFeatures;
  };
#endif

  // Maps the StructureType enumerant back to the C++ wrapper type
  // (presumably consumed by generic pNext-chain machinery elsewhere in the file).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevice16BitStorageFeatures>
  {
    using Type = PhysicalDevice16BitStorageFeatures;
  };

  // Backward-compatible alias: the KHR extension struct was promoted to core.
  using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures;

  // wrapper struct for struct VkPhysicalDevice4444FormatsFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevice4444FormatsFeaturesEXT.html
  struct PhysicalDevice4444FormatsFeaturesEXT
  {
    // C struct this wrapper interconverts with via reinterpret_cast (member layout must match).
    using NativeType = VkPhysicalDevice4444FormatsFeaturesEXT;

    // NOTE(review): allowDuplicate/structureType appear to be traits consumed by generic
    // pNext-chain machinery elsewhere in the file — they are not used within this block.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevice4444FormatsFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is fixed by its member initializer below; only pNext and the feature flags are settable here.
    VULKAN_HPP_CONSTEXPR
      PhysicalDevice4444FormatsFeaturesEXT( Bool32 formatA4R4G4B4_ = {}, Bool32 formatA4B4G4R4_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , formatA4R4G4B4{ formatA4R4G4B4_ }
      , formatA4B4G4R4{ formatA4B4G4R4_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the wrapper type.
    PhysicalDevice4444FormatsFeaturesEXT( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevice4444FormatsFeaturesEXT( *reinterpret_cast<PhysicalDevice4444FormatsFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDevice4444FormatsFeaturesEXT & operator=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct by reinterpreting it as the wrapper type.
    PhysicalDevice4444FormatsFeaturesEXT & operator=( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevice4444FormatsFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setFormatA4R4G4B4( Bool32 formatA4R4G4B4_ ) VULKAN_HPP_NOEXCEPT
    {
      formatA4R4G4B4 = formatA4R4G4B4_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setFormatA4B4G4R4( Bool32 formatA4B4G4R4_ ) VULKAN_HPP_NOEXCEPT
    {
      formatA4B4G4R4 = formatA4B4G4R4_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference and pointer conversions to the C type, relying on identical layout.
    operator VkPhysicalDevice4444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevice4444FormatsFeaturesEXT *>( this );
    }

    operator VkPhysicalDevice4444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevice4444FormatsFeaturesEXT *>( this );
    }

    operator VkPhysicalDevice4444FormatsFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevice4444FormatsFeaturesEXT *>( this );
    }

    operator VkPhysicalDevice4444FormatsFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevice4444FormatsFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members; used by operator== when VULKAN_HPP_USE_REFLECT is defined.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, formatA4R4G4B4, formatA4B4G4R4 );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevice4444FormatsFeaturesEXT const & ) const = default;
#else
    // Note: pNext is compared by pointer value only; chained structures are not compared deeply.
    bool operator==( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatA4R4G4B4 == rhs.formatA4R4G4B4 ) && ( formatA4B4G4R4 == rhs.formatA4B4G4R4 );
#  endif
    }

    bool operator!=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType          = StructureType::ePhysicalDevice4444FormatsFeaturesEXT;
    void *        pNext          = {};
    Bool32        formatA4R4G4B4 = {};
    Bool32        formatA4B4G4R4 = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper type (only provided when compiling as C++20 or newer).
  template <>
  struct CppType<VkPhysicalDevice4444FormatsFeaturesEXT>
  {
    using Type = PhysicalDevice4444FormatsFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant back to the C++ wrapper type
  // (presumably consumed by generic pNext-chain machinery elsewhere in the file).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevice4444FormatsFeaturesEXT>
  {
    using Type = PhysicalDevice4444FormatsFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDevice8BitStorageFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevice8BitStorageFeatures.html
  struct PhysicalDevice8BitStorageFeatures
  {
    // C struct this wrapper interconverts with via reinterpret_cast (member layout must match).
    using NativeType = VkPhysicalDevice8BitStorageFeatures;

    // NOTE(review): allowDuplicate/structureType appear to be traits consumed by generic
    // pNext-chain machinery elsewhere in the file — they are not used within this block.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevice8BitStorageFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is fixed by its member initializer below; only pNext and the feature flags are settable here.
    VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( Bool32 storageBuffer8BitAccess_           = {},
                                                            Bool32 uniformAndStorageBuffer8BitAccess_ = {},
                                                            Bool32 storagePushConstant8_              = {},
                                                            void * pNext_                             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , storageBuffer8BitAccess{ storageBuffer8BitAccess_ }
      , uniformAndStorageBuffer8BitAccess{ uniformAndStorageBuffer8BitAccess_ }
      , storagePushConstant8{ storagePushConstant8_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the wrapper type.
    PhysicalDevice8BitStorageFeatures( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevice8BitStorageFeatures( *reinterpret_cast<PhysicalDevice8BitStorageFeatures const *>( &rhs ) )
    {
    }

    PhysicalDevice8BitStorageFeatures & operator=( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct by reinterpreting it as the wrapper type.
    PhysicalDevice8BitStorageFeatures & operator=( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevice8BitStorageFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setStorageBuffer8BitAccess( Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      storageBuffer8BitAccess = storageBuffer8BitAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures &
      setUniformAndStorageBuffer8BitAccess( Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setStoragePushConstant8( Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
    {
      storagePushConstant8 = storagePushConstant8_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference and pointer conversions to the C type, relying on identical layout.
    operator VkPhysicalDevice8BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevice8BitStorageFeatures *>( this );
    }

    operator VkPhysicalDevice8BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevice8BitStorageFeatures *>( this );
    }

    operator VkPhysicalDevice8BitStorageFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevice8BitStorageFeatures *>( this );
    }

    operator VkPhysicalDevice8BitStorageFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevice8BitStorageFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members; used by operator== when VULKAN_HPP_USE_REFLECT is defined.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, storageBuffer8BitAccess, uniformAndStorageBuffer8BitAccess, storagePushConstant8 );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevice8BitStorageFeatures const & ) const = default;
#else
    // Note: pNext is compared by pointer value only; chained structures are not compared deeply.
    bool operator==( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) &&
             ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) && ( storagePushConstant8 == rhs.storagePushConstant8 );
#  endif
    }

    bool operator!=( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                             = StructureType::ePhysicalDevice8BitStorageFeatures;
    void *        pNext                             = {};
    Bool32        storageBuffer8BitAccess           = {};
    Bool32        uniformAndStorageBuffer8BitAccess = {};
    Bool32        storagePushConstant8              = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper type (only provided when compiling as C++20 or newer).
  template <>
  struct CppType<VkPhysicalDevice8BitStorageFeatures>
  {
    using Type = PhysicalDevice8BitStorageFeatures;
  };
#endif

  // Maps the StructureType enumerant back to the C++ wrapper type
  // (presumably consumed by generic pNext-chain machinery elsewhere in the file).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevice8BitStorageFeatures>
  {
    using Type = PhysicalDevice8BitStorageFeatures;
  };

  // Backward-compatible alias: the KHR extension struct was promoted to core.
  using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures;

  // wrapper struct for struct VkPhysicalDeviceASTCDecodeFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceASTCDecodeFeaturesEXT.html
  struct PhysicalDeviceASTCDecodeFeaturesEXT
  {
    // C struct this wrapper interconverts with via reinterpret_cast (member layout must match).
    using NativeType = VkPhysicalDeviceASTCDecodeFeaturesEXT;

    // NOTE(review): allowDuplicate/structureType appear to be traits consumed by generic
    // pNext-chain machinery elsewhere in the file — they are not used within this block.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is fixed by its member initializer below; only pNext and the feature flag are settable here.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( Bool32 decodeModeSharedExponent_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , decodeModeSharedExponent{ decodeModeSharedExponent_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the wrapper type.
    PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceASTCDecodeFeaturesEXT( *reinterpret_cast<PhysicalDeviceASTCDecodeFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceASTCDecodeFeaturesEXT & operator=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct by reinterpreting it as the wrapper type.
    PhysicalDeviceASTCDecodeFeaturesEXT & operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceASTCDecodeFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT & setDecodeModeSharedExponent( Bool32 decodeModeSharedExponent_ ) VULKAN_HPP_NOEXCEPT
    {
      decodeModeSharedExponent = decodeModeSharedExponent_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference and pointer conversions to the C type, relying on identical layout.
    operator VkPhysicalDeviceASTCDecodeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceASTCDecodeFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceASTCDecodeFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceASTCDecodeFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceASTCDecodeFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members; used by operator== when VULKAN_HPP_USE_REFLECT is defined.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, decodeModeSharedExponent );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceASTCDecodeFeaturesEXT const & ) const = default;
#else
    // Note: pNext is compared by pointer value only; chained structures are not compared deeply.
    bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decodeModeSharedExponent == rhs.decodeModeSharedExponent );
#  endif
    }

    bool operator!=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                    = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
    void *        pNext                    = {};
    Bool32        decodeModeSharedExponent = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper type (only provided when compiling as C++20 or newer).
  template <>
  struct CppType<VkPhysicalDeviceASTCDecodeFeaturesEXT>
  {
    using Type = PhysicalDeviceASTCDecodeFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant back to the C++ wrapper type
  // (presumably consumed by generic pNext-chain machinery elsewhere in the file).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT>
  {
    using Type = PhysicalDeviceASTCDecodeFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceAccelerationStructureFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAccelerationStructureFeaturesKHR.html
  struct PhysicalDeviceAccelerationStructureFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceAccelerationStructureFeaturesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructureFeaturesKHR( Bool32 accelerationStructure_                                 = {},
                                                                         Bool32 accelerationStructureCaptureReplay_                    = {},
                                                                         Bool32 accelerationStructureIndirectBuild_                    = {},
                                                                         Bool32 accelerationStructureHostCommands_                     = {},
                                                                         Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ = {},
                                                                         void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , accelerationStructure{ accelerationStructure_ }
      , accelerationStructureCaptureReplay{ accelerationStructureCaptureReplay_ }
      , accelerationStructureIndirectBuild{ accelerationStructureIndirectBuild_ }
      , accelerationStructureHostCommands{ accelerationStructureHostCommands_ }
      , descriptorBindingAccelerationStructureUpdateAfterBind{ descriptorBindingAccelerationStructureUpdateAfterBind_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceAccelerationStructureFeaturesKHR( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceAccelerationStructureFeaturesKHR( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceAccelerationStructureFeaturesKHR( *reinterpret_cast<PhysicalDeviceAccelerationStructureFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceAccelerationStructureFeaturesKHR & operator=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceAccelerationStructureFeaturesKHR & operator=( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceAccelerationStructureFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructure( Bool32 accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructure = accelerationStructure_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR &
      setAccelerationStructureCaptureReplay( Bool32 accelerationStructureCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureCaptureReplay = accelerationStructureCaptureReplay_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR &
      setAccelerationStructureIndirectBuild( Bool32 accelerationStructureIndirectBuild_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureIndirectBuild = accelerationStructureIndirectBuild_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR &
      setAccelerationStructureHostCommands( Bool32 accelerationStructureHostCommands_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureHostCommands = accelerationStructureHostCommands_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR &
      setDescriptorBindingAccelerationStructureUpdateAfterBind( Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingAccelerationStructureUpdateAfterBind = descriptorBindingAccelerationStructureUpdateAfterBind_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPhysicalDeviceAccelerationStructureFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceAccelerationStructureFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceAccelerationStructureFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceAccelerationStructureFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceAccelerationStructureFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceAccelerationStructureFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceAccelerationStructureFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceAccelerationStructureFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       accelerationStructure,
                       accelerationStructureCaptureReplay,
                       accelerationStructureIndirectBuild,
                       accelerationStructureHostCommands,
                       descriptorBindingAccelerationStructureUpdateAfterBind );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: all comparison operators come from the defaulted three-way compare.
    auto operator<=>( PhysicalDeviceAccelerationStructureFeaturesKHR const & ) const = default;
#else
    // Pre-C++20 fallback: memberwise equality. Note pNext is compared as a raw
    // pointer value; the chained structures are not deep-compared.
    bool operator==( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure ) &&
             ( accelerationStructureCaptureReplay == rhs.accelerationStructureCaptureReplay ) &&
             ( accelerationStructureIndirectBuild == rhs.accelerationStructureIndirectBuild ) &&
             ( accelerationStructureHostCommands == rhs.accelerationStructureHostCommands ) &&
             ( descriptorBindingAccelerationStructureUpdateAfterBind == rhs.descriptorBindingAccelerationStructureUpdateAfterBind );
#  endif
    }

    bool operator!=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror the native C struct exactly; this is what
    // makes the reinterpret_cast-based conversions above valid. sType identifies
    // the structure to the API, pNext extends the structure chain.
    StructureType sType                                                 = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR;
    void *        pNext                                                 = {};
    Bool32        accelerationStructure                                 = {};
    Bool32        accelerationStructureCaptureReplay                    = {};
    Bool32        accelerationStructureIndirectBuild                    = {};
    Bool32        accelerationStructureHostCommands                     = {};
    Bool32        descriptorBindingAccelerationStructureUpdateAfterBind = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper for template-based lookup.
  template <>
  struct CppType<VkPhysicalDeviceAccelerationStructureFeaturesKHR>
  {
    using Type = PhysicalDeviceAccelerationStructureFeaturesKHR;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR>
  {
    using Type = PhysicalDeviceAccelerationStructureFeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceAccelerationStructurePropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAccelerationStructurePropertiesKHR.html
  // Layout-compatible C++ wrapper: member order/types mirror the C struct so
  // conversions are done via reinterpret_cast. No setters are generated for
  // this struct (it is a properties/query output structure).
  struct PhysicalDeviceAccelerationStructurePropertiesKHR
  {
    using NativeType = VkPhysicalDeviceAccelerationStructurePropertiesKHR;

    // allowDuplicate == false: at most one instance of this struct may appear
    // in a structure (pNext) chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every member defaults to zero / nullptr.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR( uint64_t maxGeometryCount_                                           = {},
                                                                           uint64_t maxInstanceCount_                                           = {},
                                                                           uint64_t maxPrimitiveCount_                                          = {},
                                                                           uint32_t maxPerStageDescriptorAccelerationStructures_                = {},
                                                                           uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ = {},
                                                                           uint32_t maxDescriptorSetAccelerationStructures_                     = {},
                                                                           uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures_      = {},
                                                                           uint32_t minAccelerationStructureScratchOffsetAlignment_             = {},
                                                                           void *   pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxGeometryCount{ maxGeometryCount_ }
      , maxInstanceCount{ maxInstanceCount_ }
      , maxPrimitiveCount{ maxPrimitiveCount_ }
      , maxPerStageDescriptorAccelerationStructures{ maxPerStageDescriptorAccelerationStructures_ }
      , maxPerStageDescriptorUpdateAfterBindAccelerationStructures{ maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ }
      , maxDescriptorSetAccelerationStructures{ maxDescriptorSetAccelerationStructures_ }
      , maxDescriptorSetUpdateAfterBindAccelerationStructures{ maxDescriptorSetUpdateAfterBindAccelerationStructures_ }
      , minAccelerationStructureScratchOffsetAlignment{ minAccelerationStructureScratchOffsetAlignment_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceAccelerationStructurePropertiesKHR( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (valid because of layout compatibility).
    PhysicalDeviceAccelerationStructurePropertiesKHR( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceAccelerationStructurePropertiesKHR( *reinterpret_cast<PhysicalDeviceAccelerationStructurePropertiesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceAccelerationStructurePropertiesKHR & operator=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    PhysicalDeviceAccelerationStructurePropertiesKHR & operator=( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceAccelerationStructurePropertiesKHR const *>( &rhs );
      return *this;
    }

    // Reference/pointer conversions to the native C struct, for passing this
    // wrapper directly to the Vulkan C API.
    operator VkPhysicalDeviceAccelerationStructurePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceAccelerationStructurePropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceAccelerationStructurePropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceAccelerationStructurePropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceAccelerationStructurePropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceAccelerationStructurePropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceAccelerationStructurePropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceAccelerationStructurePropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &,
               void * const &,
               uint64_t const &,
               uint64_t const &,
               uint64_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       maxGeometryCount,
                       maxInstanceCount,
                       maxPrimitiveCount,
                       maxPerStageDescriptorAccelerationStructures,
                       maxPerStageDescriptorUpdateAfterBindAccelerationStructures,
                       maxDescriptorSetAccelerationStructures,
                       maxDescriptorSetUpdateAfterBindAccelerationStructures,
                       minAccelerationStructureScratchOffsetAlignment );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: comparisons come from the defaulted three-way compare.
    auto operator<=>( PhysicalDeviceAccelerationStructurePropertiesKHR const & ) const = default;
#else
    // Pre-C++20 fallback: memberwise equality (pNext compared as a raw pointer).
    bool operator==( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxGeometryCount == rhs.maxGeometryCount ) && ( maxInstanceCount == rhs.maxInstanceCount ) &&
             ( maxPrimitiveCount == rhs.maxPrimitiveCount ) &&
             ( maxPerStageDescriptorAccelerationStructures == rhs.maxPerStageDescriptorAccelerationStructures ) &&
             ( maxPerStageDescriptorUpdateAfterBindAccelerationStructures == rhs.maxPerStageDescriptorUpdateAfterBindAccelerationStructures ) &&
             ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures ) &&
             ( maxDescriptorSetUpdateAfterBindAccelerationStructures == rhs.maxDescriptorSetUpdateAfterBindAccelerationStructures ) &&
             ( minAccelerationStructureScratchOffsetAlignment == rhs.minAccelerationStructureScratchOffsetAlignment );
#  endif
    }

    bool operator!=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order/types mirror the native C struct exactly (required for the
    // reinterpret_cast conversions above).
    StructureType sType                                                      = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR;
    void *        pNext                                                      = {};
    uint64_t      maxGeometryCount                                           = {};
    uint64_t      maxInstanceCount                                           = {};
    uint64_t      maxPrimitiveCount                                          = {};
    uint32_t      maxPerStageDescriptorAccelerationStructures                = {};
    uint32_t      maxPerStageDescriptorUpdateAfterBindAccelerationStructures = {};
    uint32_t      maxDescriptorSetAccelerationStructures                     = {};
    uint32_t      maxDescriptorSetUpdateAfterBindAccelerationStructures      = {};
    uint32_t      minAccelerationStructureScratchOffsetAlignment             = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper for template-based lookup.
  template <>
  struct CppType<VkPhysicalDeviceAccelerationStructurePropertiesKHR>
  {
    using Type = PhysicalDeviceAccelerationStructurePropertiesKHR;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR>
  {
    using Type = PhysicalDeviceAccelerationStructurePropertiesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceAddressBindingReportFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAddressBindingReportFeaturesEXT.html
  // Layout-compatible C++ wrapper: member order/types mirror the C struct so
  // conversions are done via reinterpret_cast.
  struct PhysicalDeviceAddressBindingReportFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceAddressBindingReportFeaturesEXT;

    // allowDuplicate == false: at most one instance of this struct may appear
    // in a structure (pNext) chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; the feature flag defaults to zero (false).
    VULKAN_HPP_CONSTEXPR PhysicalDeviceAddressBindingReportFeaturesEXT( Bool32 reportAddressBinding_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , reportAddressBinding{ reportAddressBinding_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceAddressBindingReportFeaturesEXT( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (valid because of layout compatibility).
    PhysicalDeviceAddressBindingReportFeaturesEXT( VkPhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceAddressBindingReportFeaturesEXT( *reinterpret_cast<PhysicalDeviceAddressBindingReportFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceAddressBindingReportFeaturesEXT & operator=( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    PhysicalDeviceAddressBindingReportFeaturesEXT & operator=( VkPhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceAddressBindingReportFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAddressBindingReportFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAddressBindingReportFeaturesEXT & setReportAddressBinding( Bool32 reportAddressBinding_ ) VULKAN_HPP_NOEXCEPT
    {
      reportAddressBinding = reportAddressBinding_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reference/pointer conversions to the native C struct.
    operator VkPhysicalDeviceAddressBindingReportFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceAddressBindingReportFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceAddressBindingReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceAddressBindingReportFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceAddressBindingReportFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceAddressBindingReportFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceAddressBindingReportFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceAddressBindingReportFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, reportAddressBinding );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: comparisons come from the defaulted three-way compare.
    auto operator<=>( PhysicalDeviceAddressBindingReportFeaturesEXT const & ) const = default;
#else
    // Pre-C++20 fallback: memberwise equality (pNext compared as a raw pointer).
    bool operator==( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( reportAddressBinding == rhs.reportAddressBinding );
#  endif
    }

    bool operator!=( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order/types mirror the native C struct exactly.
    StructureType sType                = StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT;
    void *        pNext                = {};
    Bool32        reportAddressBinding = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper for template-based lookup.
  template <>
  struct CppType<VkPhysicalDeviceAddressBindingReportFeaturesEXT>
  {
    using Type = PhysicalDeviceAddressBindingReportFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT>
  {
    using Type = PhysicalDeviceAddressBindingReportFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceAmigoProfilingFeaturesSEC, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAmigoProfilingFeaturesSEC.html
  // Layout-compatible C++ wrapper: member order/types mirror the C struct so
  // conversions are done via reinterpret_cast.
  struct PhysicalDeviceAmigoProfilingFeaturesSEC
  {
    using NativeType = VkPhysicalDeviceAmigoProfilingFeaturesSEC;

    // allowDuplicate == false: at most one instance of this struct may appear
    // in a structure (pNext) chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; the feature flag defaults to zero (false).
    VULKAN_HPP_CONSTEXPR PhysicalDeviceAmigoProfilingFeaturesSEC( Bool32 amigoProfiling_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , amigoProfiling{ amigoProfiling_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceAmigoProfilingFeaturesSEC( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (valid because of layout compatibility).
    PhysicalDeviceAmigoProfilingFeaturesSEC( VkPhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceAmigoProfilingFeaturesSEC( *reinterpret_cast<PhysicalDeviceAmigoProfilingFeaturesSEC const *>( &rhs ) )
    {
    }

    PhysicalDeviceAmigoProfilingFeaturesSEC & operator=( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    PhysicalDeviceAmigoProfilingFeaturesSEC & operator=( VkPhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceAmigoProfilingFeaturesSEC const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAmigoProfilingFeaturesSEC & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAmigoProfilingFeaturesSEC & setAmigoProfiling( Bool32 amigoProfiling_ ) VULKAN_HPP_NOEXCEPT
    {
      amigoProfiling = amigoProfiling_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reference/pointer conversions to the native C struct.
    operator VkPhysicalDeviceAmigoProfilingFeaturesSEC const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceAmigoProfilingFeaturesSEC *>( this );
    }

    operator VkPhysicalDeviceAmigoProfilingFeaturesSEC &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceAmigoProfilingFeaturesSEC *>( this );
    }

    operator VkPhysicalDeviceAmigoProfilingFeaturesSEC const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceAmigoProfilingFeaturesSEC *>( this );
    }

    operator VkPhysicalDeviceAmigoProfilingFeaturesSEC *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceAmigoProfilingFeaturesSEC *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, amigoProfiling );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: comparisons come from the defaulted three-way compare.
    auto operator<=>( PhysicalDeviceAmigoProfilingFeaturesSEC const & ) const = default;
#else
    // Pre-C++20 fallback: memberwise equality (pNext compared as a raw pointer).
    bool operator==( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( amigoProfiling == rhs.amigoProfiling );
#  endif
    }

    bool operator!=( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order/types mirror the native C struct exactly.
    StructureType sType          = StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC;
    void *        pNext          = {};
    Bool32        amigoProfiling = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper for template-based lookup.
  template <>
  struct CppType<VkPhysicalDeviceAmigoProfilingFeaturesSEC>
  {
    using Type = PhysicalDeviceAmigoProfilingFeaturesSEC;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC>
  {
    using Type = PhysicalDeviceAmigoProfilingFeaturesSEC;
  };

  // wrapper struct for struct VkPhysicalDeviceAntiLagFeaturesAMD, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAntiLagFeaturesAMD.html
  // Layout-compatible C++ wrapper: member order/types mirror the C struct so
  // conversions are done via reinterpret_cast.
  struct PhysicalDeviceAntiLagFeaturesAMD
  {
    using NativeType = VkPhysicalDeviceAntiLagFeaturesAMD;

    // allowDuplicate == false: at most one instance of this struct may appear
    // in a structure (pNext) chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceAntiLagFeaturesAMD;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; the feature flag defaults to zero (false).
    VULKAN_HPP_CONSTEXPR PhysicalDeviceAntiLagFeaturesAMD( Bool32 antiLag_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , antiLag{ antiLag_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceAntiLagFeaturesAMD( PhysicalDeviceAntiLagFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (valid because of layout compatibility).
    PhysicalDeviceAntiLagFeaturesAMD( VkPhysicalDeviceAntiLagFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceAntiLagFeaturesAMD( *reinterpret_cast<PhysicalDeviceAntiLagFeaturesAMD const *>( &rhs ) )
    {
    }

    PhysicalDeviceAntiLagFeaturesAMD & operator=( PhysicalDeviceAntiLagFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    PhysicalDeviceAntiLagFeaturesAMD & operator=( VkPhysicalDeviceAntiLagFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceAntiLagFeaturesAMD const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAntiLagFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAntiLagFeaturesAMD & setAntiLag( Bool32 antiLag_ ) VULKAN_HPP_NOEXCEPT
    {
      antiLag = antiLag_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reference/pointer conversions to the native C struct.
    operator VkPhysicalDeviceAntiLagFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceAntiLagFeaturesAMD *>( this );
    }

    operator VkPhysicalDeviceAntiLagFeaturesAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceAntiLagFeaturesAMD *>( this );
    }

    operator VkPhysicalDeviceAntiLagFeaturesAMD const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceAntiLagFeaturesAMD *>( this );
    }

    operator VkPhysicalDeviceAntiLagFeaturesAMD *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceAntiLagFeaturesAMD *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, antiLag );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: comparisons come from the defaulted three-way compare.
    auto operator<=>( PhysicalDeviceAntiLagFeaturesAMD const & ) const = default;
#else
    // Pre-C++20 fallback: memberwise equality (pNext compared as a raw pointer).
    bool operator==( PhysicalDeviceAntiLagFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( antiLag == rhs.antiLag );
#  endif
    }

    bool operator!=( PhysicalDeviceAntiLagFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order/types mirror the native C struct exactly.
    StructureType sType   = StructureType::ePhysicalDeviceAntiLagFeaturesAMD;
    void *        pNext   = {};
    Bool32        antiLag = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper for template-based lookup.
  template <>
  struct CppType<VkPhysicalDeviceAntiLagFeaturesAMD>
  {
    using Type = PhysicalDeviceAntiLagFeaturesAMD;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceAntiLagFeaturesAMD>
  {
    using Type = PhysicalDeviceAntiLagFeaturesAMD;
  };

  // wrapper struct for struct VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT.html
  // Layout-compatible C++ wrapper: member order/types mirror the C struct so
  // conversions are done via reinterpret_cast.
  struct PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT;

    // allowDuplicate == false: at most one instance of this struct may appear
    // in a structure (pNext) chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; the feature flag defaults to zero (false).
    VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT( Bool32 attachmentFeedbackLoopDynamicState_ = {},
                                                                                      void * pNext_                              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , attachmentFeedbackLoopDynamicState{ attachmentFeedbackLoopDynamicState_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (valid because of layout compatibility).
    PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT( VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT &
      operator=( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT &
      operator=( VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT &
      setAttachmentFeedbackLoopDynamicState( Bool32 attachmentFeedbackLoopDynamicState_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentFeedbackLoopDynamicState = attachmentFeedbackLoopDynamicState_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reference/pointer conversions to the native C struct.
    operator VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, attachmentFeedbackLoopDynamicState );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: comparisons come from the defaulted three-way compare.
    auto operator<=>( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & ) const = default;
#else
    // Pre-C++20 fallback: memberwise equality (pNext compared as a raw pointer).
    bool operator==( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentFeedbackLoopDynamicState == rhs.attachmentFeedbackLoopDynamicState );
#  endif
    }

    bool operator!=( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order/types mirror the native C struct exactly.
    StructureType sType                              = StructureType::ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT;
    void *        pNext                              = {};
    Bool32        attachmentFeedbackLoopDynamicState = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper for template-based lookup.
  template <>
  struct CppType<VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT>
  {
    using Type = PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT>
  {
    using Type = PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT.html
  // Layout-compatible C++ wrapper: member order/types mirror the C struct so
  // conversions are done via reinterpret_cast.
  struct PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;

    // allowDuplicate == false: at most one instance of this struct may appear
    // in a structure (pNext) chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; the feature flag defaults to zero (false).
    VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( Bool32 attachmentFeedbackLoopLayout_ = {},
                                                                                void * pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , attachmentFeedbackLoopLayout{ attachmentFeedbackLoopLayout_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (valid because of layout compatibility).
    PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( *reinterpret_cast<PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT &
      operator=( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & operator=( VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT &
      setAttachmentFeedbackLoopLayout( Bool32 attachmentFeedbackLoopLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentFeedbackLoopLayout = attachmentFeedbackLoopLayout_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reference/pointer conversions to the native C struct.
    operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, attachmentFeedbackLoopLayout );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: comparisons come from the defaulted three-way compare.
    auto operator<=>( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & ) const = default;
#else
    // Pre-C++20 fallback: memberwise equality (pNext compared as a raw pointer).
    bool operator==( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentFeedbackLoopLayout == rhs.attachmentFeedbackLoopLayout );
#  endif
    }

    bool operator!=( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order/types mirror the native C struct exactly.
    StructureType sType                        = StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
    void *        pNext                        = {};
    Bool32        attachmentFeedbackLoopLayout = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization: maps the native C struct to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT>
  {
    using Type = PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
  };
#endif

  // Trait specialization: maps the StructureType enum value to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT>
  {
    using Type = PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT.html
  struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT
  {
    // The layout-compatible C struct this type wraps.
    using NativeType = VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( Bool32 advancedBlendCoherentOperations_ = {},
                                                                          void * pNext_                           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , advancedBlendCoherentOperations( advancedBlendCoherentOperations_ )
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceBlendOperationAdvancedFeaturesEXT( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; the types are layout-compatible, so this forwards to the copy constructor.
    PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceBlendOperationAdvancedFeaturesEXT( *reinterpret_cast<PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; forwards to the defaulted copy assignment.
    PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT &
      setAdvancedBlendCoherentOperations( Bool32 advancedBlendCoherentOperations_ ) VULKAN_HPP_NOEXCEPT
    {
      this->advancedBlendCoherentOperations = advancedBlendCoherentOperations_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer.
    operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>( this );
    }

    operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>( this );
    }

    operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to every member, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, advancedBlendCoherentOperations );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations );
#  endif
    }

    bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType                           = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
    void *        pNext                           = {};
    Bool32        advancedBlendCoherentOperations = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization: maps the native C struct to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT>
  {
    using Type = PhysicalDeviceBlendOperationAdvancedFeaturesEXT;
  };
#endif

  // Trait specialization: maps the StructureType enum value to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT>
  {
    using Type = PhysicalDeviceBlendOperationAdvancedFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT.html
  struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT
  {
    // The layout-compatible C struct this type wraps.
    using NativeType = VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( uint32_t advancedBlendMaxColorAttachments_      = {},
                                                                            Bool32   advancedBlendIndependentBlend_         = {},
                                                                            Bool32   advancedBlendNonPremultipliedSrcColor_ = {},
                                                                            Bool32   advancedBlendNonPremultipliedDstColor_ = {},
                                                                            Bool32   advancedBlendCorrelatedOverlap_        = {},
                                                                            Bool32   advancedBlendAllOperations_            = {},
                                                                            void *   pNext_                                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ )
      , advancedBlendIndependentBlend( advancedBlendIndependentBlend_ )
      , advancedBlendNonPremultipliedSrcColor( advancedBlendNonPremultipliedSrcColor_ )
      , advancedBlendNonPremultipliedDstColor( advancedBlendNonPremultipliedDstColor_ )
      , advancedBlendCorrelatedOverlap( advancedBlendCorrelatedOverlap_ )
      , advancedBlendAllOperations( advancedBlendAllOperations_ )
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceBlendOperationAdvancedPropertiesEXT( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; the types are layout-compatible, so this forwards to the copy constructor.
    PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceBlendOperationAdvancedPropertiesEXT( *reinterpret_cast<PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceBlendOperationAdvancedPropertiesEXT &
      operator=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; forwards to the defaulted copy assignment.
    // Note: this is a query-only properties struct, so no member setters are provided.
    PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C struct, by reference and by pointer.
    operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>( this );
    }

    operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>( this );
    }

    operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to every member, in declaration order.
    std::tuple<StructureType const &, void * const &, uint32_t const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       advancedBlendMaxColorAttachments,
                       advancedBlendIndependentBlend,
                       advancedBlendNonPremultipliedSrcColor,
                       advancedBlendNonPremultipliedDstColor,
                       advancedBlendCorrelatedOverlap,
                       advancedBlendAllOperations );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments ) &&
             ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend ) &&
             ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor ) &&
             ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor ) &&
             ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap ) && ( advancedBlendAllOperations == rhs.advancedBlendAllOperations );
#  endif
    }

    bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType                                 = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
    void *        pNext                                 = {};
    uint32_t      advancedBlendMaxColorAttachments      = {};
    Bool32        advancedBlendIndependentBlend         = {};
    Bool32        advancedBlendNonPremultipliedSrcColor = {};
    Bool32        advancedBlendNonPremultipliedDstColor = {};
    Bool32        advancedBlendCorrelatedOverlap        = {};
    Bool32        advancedBlendAllOperations            = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization: maps the native C struct to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT>
  {
    using Type = PhysicalDeviceBlendOperationAdvancedPropertiesEXT;
  };
#endif

  // Trait specialization: maps the StructureType enum value to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT>
  {
    using Type = PhysicalDeviceBlendOperationAdvancedPropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceBorderColorSwizzleFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceBorderColorSwizzleFeaturesEXT.html
  struct PhysicalDeviceBorderColorSwizzleFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceBorderColorSwizzleFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT( Bool32 borderColorSwizzle_          = {},
                                                                      Bool32 borderColorSwizzleFromImage_ = {},
                                                                      void * pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , borderColorSwizzle{ borderColorSwizzle_ }
      , borderColorSwizzleFromImage{ borderColorSwizzleFromImage_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceBorderColorSwizzleFeaturesEXT( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceBorderColorSwizzleFeaturesEXT( *reinterpret_cast<PhysicalDeviceBorderColorSwizzleFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceBorderColorSwizzleFeaturesEXT & operator=( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceBorderColorSwizzleFeaturesEXT & operator=( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceBorderColorSwizzleFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & setBorderColorSwizzle( Bool32 borderColorSwizzle_ ) VULKAN_HPP_NOEXCEPT
    {
      borderColorSwizzle = borderColorSwizzle_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT &
      setBorderColorSwizzleFromImage( Bool32 borderColorSwizzleFromImage_ ) VULKAN_HPP_NOEXCEPT
    {
      borderColorSwizzleFromImage = borderColorSwizzleFromImage_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceBorderColorSwizzleFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceBorderColorSwizzleFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceBorderColorSwizzleFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceBorderColorSwizzleFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, borderColorSwizzle, borderColorSwizzleFromImage );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( borderColorSwizzle == rhs.borderColorSwizzle ) &&
             ( borderColorSwizzleFromImage == rhs.borderColorSwizzleFromImage );
#  endif
    }

    bool operator!=( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                       = StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT;
    void *        pNext                       = {};
    Bool32        borderColorSwizzle          = {};
    Bool32        borderColorSwizzleFromImage = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization: maps the native C struct to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceBorderColorSwizzleFeaturesEXT>
  {
    using Type = PhysicalDeviceBorderColorSwizzleFeaturesEXT;
  };
#endif

  // Trait specialization: maps the StructureType enum value to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT>
  {
    using Type = PhysicalDeviceBorderColorSwizzleFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceBufferDeviceAddressFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceBufferDeviceAddressFeatures.html
  struct PhysicalDeviceBufferDeviceAddressFeatures
  {
    // The layout-compatible C struct this type wraps.
    using NativeType = VkPhysicalDeviceBufferDeviceAddressFeatures;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( Bool32 bufferDeviceAddress_              = {},
                                                                    Bool32 bufferDeviceAddressCaptureReplay_ = {},
                                                                    Bool32 bufferDeviceAddressMultiDevice_   = {},
                                                                    void * pNext_                            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , bufferDeviceAddress( bufferDeviceAddress_ )
      , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ )
      , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; the types are layout-compatible, so this forwards to the copy constructor.
    PhysicalDeviceBufferDeviceAddressFeatures( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceBufferDeviceAddressFeatures( *reinterpret_cast<PhysicalDeviceBufferDeviceAddressFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceBufferDeviceAddressFeatures & operator=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; forwards to the defaulted copy assignment.
    PhysicalDeviceBufferDeviceAddressFeatures & operator=( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceBufferDeviceAddressFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddress( Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      this->bufferDeviceAddress = bufferDeviceAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures &
      setBufferDeviceAddressCaptureReplay( Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
    {
      this->bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures &
      setBufferDeviceAddressMultiDevice( Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
    {
      this->bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer.
    operator VkPhysicalDeviceBufferDeviceAddressFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeatures const *>( this );
    }

    operator VkPhysicalDeviceBufferDeviceAddressFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeatures *>( this );
    }

    operator VkPhysicalDeviceBufferDeviceAddressFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeatures const *>( this );
    }

    operator VkPhysicalDeviceBufferDeviceAddressFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to every member, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceBufferDeviceAddressFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) &&
             ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) &&
             ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
#  endif
    }

    bool operator!=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType                            = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
    void *        pNext                            = {};
    Bool32        bufferDeviceAddress              = {};
    Bool32        bufferDeviceAddressCaptureReplay = {};
    Bool32        bufferDeviceAddressMultiDevice   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization: maps the native C struct to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceBufferDeviceAddressFeatures>
  {
    using Type = PhysicalDeviceBufferDeviceAddressFeatures;
  };
#endif

  // Trait specialization: maps the StructureType enum value to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceBufferDeviceAddressFeatures>
  {
    using Type = PhysicalDeviceBufferDeviceAddressFeatures;
  };

  // Compatibility alias: the KHR-suffixed name refers to the same wrapper struct.
  using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures;

  // wrapper struct for struct VkPhysicalDeviceBufferDeviceAddressFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceBufferDeviceAddressFeaturesEXT.html
  struct PhysicalDeviceBufferDeviceAddressFeaturesEXT
  {
    // The layout-compatible C struct this type wraps.
    using NativeType = VkPhysicalDeviceBufferDeviceAddressFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( Bool32 bufferDeviceAddress_              = {},
                                                                       Bool32 bufferDeviceAddressCaptureReplay_ = {},
                                                                       Bool32 bufferDeviceAddressMultiDevice_   = {},
                                                                       void * pNext_                            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , bufferDeviceAddress( bufferDeviceAddress_ )
      , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ )
      , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; the types are layout-compatible, so this forwards to the copy constructor.
    PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceBufferDeviceAddressFeaturesEXT( *reinterpret_cast<PhysicalDeviceBufferDeviceAddressFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; forwards to the defaulted copy assignment.
    PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceBufferDeviceAddressFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddress( Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      this->bufferDeviceAddress = bufferDeviceAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT &
      setBufferDeviceAddressCaptureReplay( Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
    {
      this->bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT &
      setBufferDeviceAddressMultiDevice( Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
    {
      this->bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer.
    operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const *>( this );
    }

    operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const *>( this );
    }

    operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to every member, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) &&
             ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) &&
             ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
#  endif
    }

    bool operator!=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType                            = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
    void *        pNext                            = {};
    Bool32        bufferDeviceAddress              = {};
    Bool32        bufferDeviceAddressCaptureReplay = {};
    Bool32        bufferDeviceAddressMultiDevice   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization: maps the native C struct to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>
  {
    using Type = PhysicalDeviceBufferDeviceAddressFeaturesEXT;
  };
#endif

  // Trait specialization: maps the StructureType enum value to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT>
  {
    using Type = PhysicalDeviceBufferDeviceAddressFeaturesEXT;
  };

  // Compatibility alias: an earlier name for the same wrapper struct.
  using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT;

  // wrapper struct for struct VkPhysicalDeviceClusterAccelerationStructureFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceClusterAccelerationStructureFeaturesNV.html
  struct PhysicalDeviceClusterAccelerationStructureFeaturesNV
  {
    using NativeType = VkPhysicalDeviceClusterAccelerationStructureFeaturesNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceClusterAccelerationStructureFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterAccelerationStructureFeaturesNV( Bool32 clusterAccelerationStructure_ = {},
                                                                               void * pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , clusterAccelerationStructure{ clusterAccelerationStructure_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceClusterAccelerationStructureFeaturesNV( PhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceClusterAccelerationStructureFeaturesNV( VkPhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceClusterAccelerationStructureFeaturesNV( *reinterpret_cast<PhysicalDeviceClusterAccelerationStructureFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceClusterAccelerationStructureFeaturesNV &
      operator=( PhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceClusterAccelerationStructureFeaturesNV & operator=( VkPhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceClusterAccelerationStructureFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterAccelerationStructureFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterAccelerationStructureFeaturesNV &
      setClusterAccelerationStructure( Bool32 clusterAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
    {
      clusterAccelerationStructure = clusterAccelerationStructure_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPhysicalDeviceClusterAccelerationStructureFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceClusterAccelerationStructureFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceClusterAccelerationStructureFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceClusterAccelerationStructureFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceClusterAccelerationStructureFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceClusterAccelerationStructureFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceClusterAccelerationStructureFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceClusterAccelerationStructureFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, clusterAccelerationStructure );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceClusterAccelerationStructureFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( clusterAccelerationStructure == rhs.clusterAccelerationStructure );
#  endif
    }

    bool operator!=( PhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                        = StructureType::ePhysicalDeviceClusterAccelerationStructureFeaturesNV;
    void *        pNext                        = {};
    Bool32        clusterAccelerationStructure = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper (specialization only provided for C++20 and up).
  template <>
  struct CppType<VkPhysicalDeviceClusterAccelerationStructureFeaturesNV>
  {
    using Type = PhysicalDeviceClusterAccelerationStructureFeaturesNV;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceClusterAccelerationStructureFeaturesNV>
  {
    using Type = PhysicalDeviceClusterAccelerationStructureFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceClusterAccelerationStructurePropertiesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceClusterAccelerationStructurePropertiesNV.html
  // Properties-only struct: in contrast to the feature structs, no member setters are generated for it.
  struct PhysicalDeviceClusterAccelerationStructurePropertiesNV
  {
    using NativeType = VkPhysicalDeviceClusterAccelerationStructurePropertiesNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceClusterAccelerationStructurePropertiesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every member defaults to zero / nullptr, sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterAccelerationStructurePropertiesNV( uint32_t maxVerticesPerCluster_              = {},
                                                                                 uint32_t maxTrianglesPerCluster_             = {},
                                                                                 uint32_t clusterScratchByteAlignment_        = {},
                                                                                 uint32_t clusterByteAlignment_               = {},
                                                                                 uint32_t clusterTemplateByteAlignment_       = {},
                                                                                 uint32_t clusterBottomLevelByteAlignment_    = {},
                                                                                 uint32_t clusterTemplateBoundsByteAlignment_ = {},
                                                                                 uint32_t maxClusterGeometryIndex_            = {},
                                                                                 void *   pNext_                              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxVerticesPerCluster{ maxVerticesPerCluster_ }
      , maxTrianglesPerCluster{ maxTrianglesPerCluster_ }
      , clusterScratchByteAlignment{ clusterScratchByteAlignment_ }
      , clusterByteAlignment{ clusterByteAlignment_ }
      , clusterTemplateByteAlignment{ clusterTemplateByteAlignment_ }
      , clusterBottomLevelByteAlignment{ clusterBottomLevelByteAlignment_ }
      , clusterTemplateBoundsByteAlignment{ clusterTemplateBoundsByteAlignment_ }
      , maxClusterGeometryIndex{ maxClusterGeometryIndex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterAccelerationStructurePropertiesNV( PhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the reinterpret_cast relies on this wrapper being layout-compatible with the C struct.
    PhysicalDeviceClusterAccelerationStructurePropertiesNV( VkPhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceClusterAccelerationStructurePropertiesNV( *reinterpret_cast<PhysicalDeviceClusterAccelerationStructurePropertiesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceClusterAccelerationStructurePropertiesNV &
      operator=( PhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    PhysicalDeviceClusterAccelerationStructurePropertiesNV &
      operator=( VkPhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceClusterAccelerationStructurePropertiesNV const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type, by const / non-const reference and pointer.
    operator VkPhysicalDeviceClusterAccelerationStructurePropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceClusterAccelerationStructurePropertiesNV *>( this );
    }

    operator VkPhysicalDeviceClusterAccelerationStructurePropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceClusterAccelerationStructurePropertiesNV *>( this );
    }

    operator VkPhysicalDeviceClusterAccelerationStructurePropertiesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceClusterAccelerationStructurePropertiesNV *>( this );
    }

    operator VkPhysicalDeviceClusterAccelerationStructurePropertiesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceClusterAccelerationStructurePropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references over all members; used by operator== below when reflection is enabled.
    std::tuple<StructureType const &,
               void * const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       maxVerticesPerCluster,
                       maxTrianglesPerCluster,
                       clusterScratchByteAlignment,
                       clusterByteAlignment,
                       clusterTemplateByteAlignment,
                       clusterBottomLevelByteAlignment,
                       clusterTemplateBoundsByteAlignment,
                       maxClusterGeometryIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceClusterAccelerationStructurePropertiesNV const & ) const = default;
#else
    // Memberwise equality. Note: pNext is compared by pointer value, not by the contents of the chained structs.
    bool operator==( PhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVerticesPerCluster == rhs.maxVerticesPerCluster ) &&
             ( maxTrianglesPerCluster == rhs.maxTrianglesPerCluster ) && ( clusterScratchByteAlignment == rhs.clusterScratchByteAlignment ) &&
             ( clusterByteAlignment == rhs.clusterByteAlignment ) && ( clusterTemplateByteAlignment == rhs.clusterTemplateByteAlignment ) &&
             ( clusterBottomLevelByteAlignment == rhs.clusterBottomLevelByteAlignment ) &&
             ( clusterTemplateBoundsByteAlignment == rhs.clusterTemplateBoundsByteAlignment ) && ( maxClusterGeometryIndex == rhs.maxClusterGeometryIndex );
#  endif
    }

    bool operator!=( PhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType defaults to the enumerant matching this struct; no setter is generated for it or for pNext's chain contents.
    StructureType sType                              = StructureType::ePhysicalDeviceClusterAccelerationStructurePropertiesNV;
    void *        pNext                              = {};
    uint32_t      maxVerticesPerCluster              = {};
    uint32_t      maxTrianglesPerCluster             = {};
    uint32_t      clusterScratchByteAlignment        = {};
    uint32_t      clusterByteAlignment               = {};
    uint32_t      clusterTemplateByteAlignment       = {};
    uint32_t      clusterBottomLevelByteAlignment    = {};
    uint32_t      clusterTemplateBoundsByteAlignment = {};
    uint32_t      maxClusterGeometryIndex            = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper (specialization only provided for C++20 and up).
  template <>
  struct CppType<VkPhysicalDeviceClusterAccelerationStructurePropertiesNV>
  {
    using Type = PhysicalDeviceClusterAccelerationStructurePropertiesNV;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceClusterAccelerationStructurePropertiesNV>
  {
    using Type = PhysicalDeviceClusterAccelerationStructurePropertiesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI.html
  struct PhysicalDeviceClusterCullingShaderFeaturesHUAWEI
  {
    using NativeType = VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; feature flags default to VK_FALSE (zero), sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( Bool32 clustercullingShader_          = {},
                                                                           Bool32 multiviewClusterCullingShader_ = {},
                                                                           void * pNext_                         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , clustercullingShader{ clustercullingShader_ }
      , multiviewClusterCullingShader{ multiviewClusterCullingShader_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the reinterpret_cast relies on this wrapper being layout-compatible with the C struct.
    PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( *reinterpret_cast<PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const *>( &rhs ) )
    {
    }

    PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & operator=( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & operator=( VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & setClustercullingShader( Bool32 clustercullingShader_ ) VULKAN_HPP_NOEXCEPT
    {
      clustercullingShader = clustercullingShader_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderFeaturesHUAWEI &
      setMultiviewClusterCullingShader( Bool32 multiviewClusterCullingShader_ ) VULKAN_HPP_NOEXCEPT
    {
      multiviewClusterCullingShader = multiviewClusterCullingShader_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by const / non-const reference and pointer.
    operator VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI *>( this );
    }

    operator VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI *>( this );
    }

    operator VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI *>( this );
    }

    operator VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references over all members; used by operator== below when reflection is enabled.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, clustercullingShader, multiviewClusterCullingShader );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & ) const = default;
#else
    // Memberwise equality. Note: pNext is compared by pointer value, not by the contents of the chained structs.
    bool operator==( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( clustercullingShader == rhs.clustercullingShader ) &&
             ( multiviewClusterCullingShader == rhs.multiviewClusterCullingShader );
#  endif
    }

    bool operator!=( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType defaults to the enumerant matching this struct; member name "clustercullingShader" (lowercase 'c')
    // mirrors the spelling used in the Vulkan registry for this extension.
    StructureType sType                         = StructureType::ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI;
    void *        pNext                         = {};
    Bool32        clustercullingShader          = {};
    Bool32        multiviewClusterCullingShader = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper (specialization only provided for C++20 and up).
  template <>
  struct CppType<VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI>
  {
    using Type = PhysicalDeviceClusterCullingShaderFeaturesHUAWEI;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI>
  {
    using Type = PhysicalDeviceClusterCullingShaderFeaturesHUAWEI;
  };

  // wrapper struct for struct VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI.html
  // Properties-only struct: in contrast to the feature structs, no member setters are generated for it.
  struct PhysicalDeviceClusterCullingShaderPropertiesHUAWEI
  {
    using NativeType = VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor. CONSTEXPR_14 (not CONSTEXPR) because copying the std::array arguments into the
    // ArrayWrapper1D members is not a C++11 constexpr-compatible operation.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( std::array<uint32_t, 3> const & maxWorkGroupCount_             = {},
                                                                                std::array<uint32_t, 3> const & maxWorkGroupSize_              = {},
                                                                                uint32_t                        maxOutputClusterCount_         = {},
                                                                                DeviceSize                      indirectBufferOffsetAlignment_ = {},
                                                                                void *                          pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxWorkGroupCount{ maxWorkGroupCount_ }
      , maxWorkGroupSize{ maxWorkGroupSize_ }
      , maxOutputClusterCount{ maxOutputClusterCount_ }
      , indirectBufferOffsetAlignment{ indirectBufferOffsetAlignment_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14
      PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the reinterpret_cast relies on this wrapper being layout-compatible with the C struct.
    PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( *reinterpret_cast<PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const *>( &rhs ) )
    {
    }

    PhysicalDeviceClusterCullingShaderPropertiesHUAWEI &
      operator=( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    PhysicalDeviceClusterCullingShaderPropertiesHUAWEI & operator=( VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type, by const / non-const reference and pointer.
    operator VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI *>( this );
    }

    operator VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI *>( this );
    }

    operator VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI *>( this );
    }

    operator VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references over all members; used by operator== below when reflection is enabled.
    std::tuple<StructureType const &,
               void * const &,
               ArrayWrapper1D<uint32_t, 3> const &,
               ArrayWrapper1D<uint32_t, 3> const &,
               uint32_t const &,
               DeviceSize const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxWorkGroupCount, maxWorkGroupSize, maxOutputClusterCount, indirectBufferOffsetAlignment );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & ) const = default;
#else
    // Memberwise equality. Note: pNext is compared by pointer value, not by the contents of the chained structs.
    bool operator==( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxWorkGroupCount == rhs.maxWorkGroupCount ) &&
             ( maxWorkGroupSize == rhs.maxWorkGroupSize ) && ( maxOutputClusterCount == rhs.maxOutputClusterCount ) &&
             ( indirectBufferOffsetAlignment == rhs.indirectBufferOffsetAlignment );
#  endif
    }

    bool operator!=( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType defaults to the enumerant matching this struct; the fixed-size C arrays of the native struct are
    // wrapped in ArrayWrapper1D so they support value semantics and comparison.
    StructureType               sType                         = StructureType::ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI;
    void *                      pNext                         = {};
    ArrayWrapper1D<uint32_t, 3> maxWorkGroupCount             = {};
    ArrayWrapper1D<uint32_t, 3> maxWorkGroupSize              = {};
    uint32_t                    maxOutputClusterCount         = {};
    DeviceSize                  indirectBufferOffsetAlignment = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper (specialization only provided for C++20 and up).
  template <>
  struct CppType<VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI>
  {
    using Type = PhysicalDeviceClusterCullingShaderPropertiesHUAWEI;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI>
  {
    using Type = PhysicalDeviceClusterCullingShaderPropertiesHUAWEI;
  };

  // wrapper struct for struct VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI.html
  struct PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI
  {
    using NativeType = VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; the feature flag defaults to VK_FALSE (zero), sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI( Bool32 clusterShadingRate_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , clusterShadingRate{ clusterShadingRate_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the reinterpret_cast relies on this wrapper being layout-compatible with the C struct.
    PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI( VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI( *reinterpret_cast<PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const *>( &rhs ) )
    {
    }

    PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI &
      operator=( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI & operator=( VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI & setClusterShadingRate( Bool32 clusterShadingRate_ ) VULKAN_HPP_NOEXCEPT
    {
      clusterShadingRate = clusterShadingRate_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by const / non-const reference and pointer.
    operator VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI *>( this );
    }

    operator VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI *>( this );
    }

    operator VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI *>( this );
    }

    operator VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references over all members; used by operator== below when reflection is enabled.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, clusterShadingRate );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & ) const = default;
#else
    // Memberwise equality. Note: pNext is compared by pointer value, not by the contents of the chained structs.
    bool operator==( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( clusterShadingRate == rhs.clusterShadingRate );
#  endif
    }

    bool operator!=( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType defaults to the enumerant matching this struct.
    StructureType sType              = StructureType::ePhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI;
    void *        pNext              = {};
    Bool32        clusterShadingRate = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper (specialization only provided for C++20 and up).
  template <>
  struct CppType<VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI>
  {
    using Type = PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI>
  {
    using Type = PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI;
  };

  // wrapper struct for struct VkPhysicalDeviceCoherentMemoryFeaturesAMD, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCoherentMemoryFeaturesAMD.html
  struct PhysicalDeviceCoherentMemoryFeaturesAMD
  {
    using NativeType = VkPhysicalDeviceCoherentMemoryFeaturesAMD;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; the feature flag defaults to VK_FALSE (zero), sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( Bool32 deviceCoherentMemory_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , deviceCoherentMemory{ deviceCoherentMemory_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the reinterpret_cast relies on this wrapper being layout-compatible with the C struct.
    PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCoherentMemoryFeaturesAMD( *reinterpret_cast<PhysicalDeviceCoherentMemoryFeaturesAMD const *>( &rhs ) )
    {
    }

    PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceCoherentMemoryFeaturesAMD const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD & setDeviceCoherentMemory( Bool32 deviceCoherentMemory_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceCoherentMemory = deviceCoherentMemory_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by const / non-const reference and pointer.
    operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCoherentMemoryFeaturesAMD *>( this );
    }

    operator VkPhysicalDeviceCoherentMemoryFeaturesAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD *>( this );
    }

    operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceCoherentMemoryFeaturesAMD *>( this );
    }

    operator VkPhysicalDeviceCoherentMemoryFeaturesAMD *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references over all members; used by operator== below when reflection is enabled.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, deviceCoherentMemory );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCoherentMemoryFeaturesAMD const & ) const = default;
#else
    // Memberwise equality. Note: pNext is compared by pointer value, not by the contents of the chained structs.
    bool operator==( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceCoherentMemory == rhs.deviceCoherentMemory );
#  endif
    }

    bool operator!=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType defaults to the enumerant matching this struct.
    StructureType sType                = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
    void *        pNext                = {};
    Bool32        deviceCoherentMemory = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper (specialization only provided for C++20 and up).
  template <>
  struct CppType<VkPhysicalDeviceCoherentMemoryFeaturesAMD>
  {
    using Type = PhysicalDeviceCoherentMemoryFeaturesAMD;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD>
  {
    using Type = PhysicalDeviceCoherentMemoryFeaturesAMD;
  };

  // wrapper struct for struct VkPhysicalDeviceColorWriteEnableFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceColorWriteEnableFeaturesEXT.html
  struct PhysicalDeviceColorWriteEnableFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceColorWriteEnableFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; defaults give a zero-valued feature struct with an empty pNext chain.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT( Bool32 colorWriteEnable_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , colorWriteEnable( colorWriteEnable_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible C struct.
    PhysicalDeviceColorWriteEnableFeaturesEXT( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceColorWriteEnableFeaturesEXT( reinterpret_cast<PhysicalDeviceColorWriteEnableFeaturesEXT const &>( rhs ) )
    {
    }

    PhysicalDeviceColorWriteEnableFeaturesEXT & operator=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible C struct.
    PhysicalDeviceColorWriteEnableFeaturesEXT & operator=( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = reinterpret_cast<PhysicalDeviceColorWriteEnableFeaturesEXT const &>( rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT & setColorWriteEnable( Bool32 colorWriteEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      colorWriteEnable = colorWriteEnable_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to the C struct, by reference and by pointer.
    operator VkPhysicalDeviceColorWriteEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceColorWriteEnableFeaturesEXT const &>( *this );
    }

    operator VkPhysicalDeviceColorWriteEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceColorWriteEnableFeaturesEXT &>( *this );
    }

    operator VkPhysicalDeviceColorWriteEnableFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceColorWriteEnableFeaturesEXT const *>( this );
    }

    operator VkPhysicalDeviceColorWriteEnableFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceColorWriteEnableFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, colorWriteEnable );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceColorWriteEnableFeaturesEXT const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return sType == rhs.sType && pNext == rhs.pNext && colorWriteEnable == rhs.colorWriteEnable;
#  endif
    }

    bool operator!=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType            = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT;
    void *        pNext            = {};
    Bool32        colorWriteEnable = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait: maps the C struct VkPhysicalDeviceColorWriteEnableFeaturesEXT to its C++ wrapper (C++20 and up).
  template <>
  struct CppType<VkPhysicalDeviceColorWriteEnableFeaturesEXT>
  {
    using Type = PhysicalDeviceColorWriteEnableFeaturesEXT;
  };
#endif

  // Trait: maps StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT>
  {
    using Type = PhysicalDeviceColorWriteEnableFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceCommandBufferInheritanceFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCommandBufferInheritanceFeaturesNV.html
  struct PhysicalDeviceCommandBufferInheritanceFeaturesNV
  {
    using NativeType = VkPhysicalDeviceCommandBufferInheritanceFeaturesNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceCommandBufferInheritanceFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCommandBufferInheritanceFeaturesNV( Bool32 commandBufferInheritance_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , commandBufferInheritance{ commandBufferInheritance_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceCommandBufferInheritanceFeaturesNV( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCommandBufferInheritanceFeaturesNV( VkPhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCommandBufferInheritanceFeaturesNV( *reinterpret_cast<PhysicalDeviceCommandBufferInheritanceFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceCommandBufferInheritanceFeaturesNV & operator=( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceCommandBufferInheritanceFeaturesNV & operator=( VkPhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceCommandBufferInheritanceFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCommandBufferInheritanceFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCommandBufferInheritanceFeaturesNV &
      setCommandBufferInheritance( Bool32 commandBufferInheritance_ ) VULKAN_HPP_NOEXCEPT
    {
      commandBufferInheritance = commandBufferInheritance_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPhysicalDeviceCommandBufferInheritanceFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCommandBufferInheritanceFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceCommandBufferInheritanceFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCommandBufferInheritanceFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceCommandBufferInheritanceFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceCommandBufferInheritanceFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceCommandBufferInheritanceFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceCommandBufferInheritanceFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, commandBufferInheritance );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandBufferInheritance == rhs.commandBufferInheritance );
#  endif
    }

    bool operator!=( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                    = StructureType::ePhysicalDeviceCommandBufferInheritanceFeaturesNV;
    void *        pNext                    = {};
    Bool32        commandBufferInheritance = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait: maps the C struct VkPhysicalDeviceCommandBufferInheritanceFeaturesNV to its C++ wrapper (C++20 and up).
  template <>
  struct CppType<VkPhysicalDeviceCommandBufferInheritanceFeaturesNV>
  {
    using Type = PhysicalDeviceCommandBufferInheritanceFeaturesNV;
  };
#endif

  // Trait: maps StructureType::ePhysicalDeviceCommandBufferInheritanceFeaturesNV to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCommandBufferInheritanceFeaturesNV>
  {
    using Type = PhysicalDeviceCommandBufferInheritanceFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR.html
  struct PhysicalDeviceComputeShaderDerivativesFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesKHR( Bool32 computeDerivativeGroupQuads_  = {},
                                                                            Bool32 computeDerivativeGroupLinear_ = {},
                                                                            void * pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , computeDerivativeGroupQuads{ computeDerivativeGroupQuads_ }
      , computeDerivativeGroupLinear{ computeDerivativeGroupLinear_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceComputeShaderDerivativesFeaturesKHR( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceComputeShaderDerivativesFeaturesKHR( VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceComputeShaderDerivativesFeaturesKHR( *reinterpret_cast<PhysicalDeviceComputeShaderDerivativesFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceComputeShaderDerivativesFeaturesKHR &
      operator=( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceComputeShaderDerivativesFeaturesKHR & operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceComputeShaderDerivativesFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesKHR &
      setComputeDerivativeGroupQuads( Bool32 computeDerivativeGroupQuads_ ) VULKAN_HPP_NOEXCEPT
    {
      computeDerivativeGroupQuads = computeDerivativeGroupQuads_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesKHR &
      setComputeDerivativeGroupLinear( Bool32 computeDerivativeGroupLinear_ ) VULKAN_HPP_NOEXCEPT
    {
      computeDerivativeGroupLinear = computeDerivativeGroupLinear_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, computeDerivativeGroupQuads, computeDerivativeGroupLinear );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads ) &&
             ( computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear );
#  endif
    }

    bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                        = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesKHR;
    void *        pNext                        = {};
    Bool32        computeDerivativeGroupQuads  = {};
    Bool32        computeDerivativeGroupLinear = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait: maps the C struct VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR to its C++ wrapper (C++20 and up).
  template <>
  struct CppType<VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR>
  {
    using Type = PhysicalDeviceComputeShaderDerivativesFeaturesKHR;
  };
#endif

  // Trait: maps StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesKHR to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesKHR>
  {
    using Type = PhysicalDeviceComputeShaderDerivativesFeaturesKHR;
  };

  // Backward-compatibility alias for the NV-suffixed name (presumably predates the KHR promotion — TODO confirm against the extension history).
  using PhysicalDeviceComputeShaderDerivativesFeaturesNV = PhysicalDeviceComputeShaderDerivativesFeaturesKHR;

  // wrapper struct for struct VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR.html
  struct PhysicalDeviceComputeShaderDerivativesPropertiesKHR
  {
    using NativeType = VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceComputeShaderDerivativesPropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; defaults give a zero-valued properties struct with an empty pNext chain.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesPropertiesKHR( Bool32 meshAndTaskShaderDerivatives_ = {},
                                                                              void * pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , meshAndTaskShaderDerivatives( meshAndTaskShaderDerivatives_ )
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceComputeShaderDerivativesPropertiesKHR( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible C struct.
    PhysicalDeviceComputeShaderDerivativesPropertiesKHR( VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceComputeShaderDerivativesPropertiesKHR( reinterpret_cast<PhysicalDeviceComputeShaderDerivativesPropertiesKHR const &>( rhs ) )
    {
    }

    PhysicalDeviceComputeShaderDerivativesPropertiesKHR &
      operator=( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible C struct.
    PhysicalDeviceComputeShaderDerivativesPropertiesKHR & operator=( VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = reinterpret_cast<PhysicalDeviceComputeShaderDerivativesPropertiesKHR const &>( rhs );
      return *this;
    }

    // Reinterpreting conversions to the C struct, by reference and by pointer.
    operator VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR const &>( *this );
    }

    operator VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR &>( *this );
    }

    operator VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR const *>( this );
    }

    operator VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, meshAndTaskShaderDerivatives );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return sType == rhs.sType && pNext == rhs.pNext && meshAndTaskShaderDerivatives == rhs.meshAndTaskShaderDerivatives;
#  endif
    }

    bool operator!=( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType                        = StructureType::ePhysicalDeviceComputeShaderDerivativesPropertiesKHR;
    void *        pNext                        = {};
    Bool32        meshAndTaskShaderDerivatives = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait: maps the C struct VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR to its C++ wrapper (C++20 and up).
  template <>
  struct CppType<VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR>
  {
    using Type = PhysicalDeviceComputeShaderDerivativesPropertiesKHR;
  };
#endif

  // Trait: maps StructureType::ePhysicalDeviceComputeShaderDerivativesPropertiesKHR to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceComputeShaderDerivativesPropertiesKHR>
  {
    using Type = PhysicalDeviceComputeShaderDerivativesPropertiesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceConditionalRenderingFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceConditionalRenderingFeaturesEXT.html
  struct PhysicalDeviceConditionalRenderingFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceConditionalRenderingFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( Bool32 conditionalRendering_          = {},
                                                                        Bool32 inheritedConditionalRendering_ = {},
                                                                        void * pNext_                         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , conditionalRendering{ conditionalRendering_ }
      , inheritedConditionalRendering{ inheritedConditionalRendering_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceConditionalRenderingFeaturesEXT( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceConditionalRenderingFeaturesEXT( *reinterpret_cast<PhysicalDeviceConditionalRenderingFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceConditionalRenderingFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & setConditionalRendering( Bool32 conditionalRendering_ ) VULKAN_HPP_NOEXCEPT
    {
      conditionalRendering = conditionalRendering_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT &
      setInheritedConditionalRendering( Bool32 inheritedConditionalRendering_ ) VULKAN_HPP_NOEXCEPT
    {
      inheritedConditionalRendering = inheritedConditionalRendering_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceConditionalRenderingFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceConditionalRenderingFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceConditionalRenderingFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceConditionalRenderingFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceConditionalRenderingFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, conditionalRendering, inheritedConditionalRendering );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceConditionalRenderingFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conditionalRendering == rhs.conditionalRendering ) &&
             ( inheritedConditionalRendering == rhs.inheritedConditionalRendering );
#  endif
    }

    bool operator!=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                         = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
    void *        pNext                         = {};
    Bool32        conditionalRendering          = {};
    Bool32        inheritedConditionalRendering = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait: maps the C struct VkPhysicalDeviceConditionalRenderingFeaturesEXT to its C++ wrapper (C++20 and up).
  template <>
  struct CppType<VkPhysicalDeviceConditionalRenderingFeaturesEXT>
  {
    using Type = PhysicalDeviceConditionalRenderingFeaturesEXT;
  };
#endif

  // Trait: maps StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT>
  {
    using Type = PhysicalDeviceConditionalRenderingFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceConservativeRasterizationPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceConservativeRasterizationPropertiesEXT.html
  struct PhysicalDeviceConservativeRasterizationPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceConservativeRasterizationPropertiesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; defaults give a zero-valued properties struct with an empty pNext chain.
    // Members are initialized in declaration order (pNext first, then the payload fields).
    VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( float  primitiveOverestimationSize_                 = {},
                                                                               float  maxExtraPrimitiveOverestimationSize_         = {},
                                                                               float  extraPrimitiveOverestimationSizeGranularity_ = {},
                                                                               Bool32 primitiveUnderestimation_                    = {},
                                                                               Bool32 conservativePointAndLineRasterization_       = {},
                                                                               Bool32 degenerateTrianglesRasterized_               = {},
                                                                               Bool32 degenerateLinesRasterized_                   = {},
                                                                               Bool32 fullyCoveredFragmentShaderInputVariable_     = {},
                                                                               Bool32 conservativeRasterizationPostDepthCoverage_  = {},
                                                                               void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , primitiveOverestimationSize( primitiveOverestimationSize_ )
      , maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ )
      , extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ )
      , primitiveUnderestimation( primitiveUnderestimation_ )
      , conservativePointAndLineRasterization( conservativePointAndLineRasterization_ )
      , degenerateTrianglesRasterized( degenerateTrianglesRasterized_ )
      , degenerateLinesRasterized( degenerateLinesRasterized_ )
      , fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ )
      , conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ )
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceConservativeRasterizationPropertiesEXT( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible C struct.
    PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceConservativeRasterizationPropertiesEXT( reinterpret_cast<PhysicalDeviceConservativeRasterizationPropertiesEXT const &>( rhs ) )
    {
    }

    PhysicalDeviceConservativeRasterizationPropertiesEXT &
      operator=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible C struct.
    PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = reinterpret_cast<PhysicalDeviceConservativeRasterizationPropertiesEXT const &>( rhs );
      return *this;
    }

    // Reinterpreting conversions to the C struct, by reference and by pointer.
    operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceConservativeRasterizationPropertiesEXT const &>( *this );
    }

    operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceConservativeRasterizationPropertiesEXT &>( *this );
    }

    operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceConservativeRasterizationPropertiesEXT const *>( this );
    }

    operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceConservativeRasterizationPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<StructureType const &,
               void * const &,
               float const &,
               float const &,
               float const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       primitiveOverestimationSize,
                       maxExtraPrimitiveOverestimationSize,
                       extraPrimitiveOverestimationSizeGranularity,
                       primitiveUnderestimation,
                       conservativePointAndLineRasterization,
                       degenerateTrianglesRasterized,
                       degenerateLinesRasterized,
                       fullyCoveredFragmentShaderInputVariable,
                       conservativeRasterizationPostDepthCoverage );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceConservativeRasterizationPropertiesEXT const & ) const = default;
#else
    // Member-wise equality; note the float members compare with operator== (exact comparison),
    // and pNext is compared as a raw pointer, not deeply.
    bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return sType == rhs.sType && pNext == rhs.pNext && primitiveOverestimationSize == rhs.primitiveOverestimationSize &&
             maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize &&
             extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity &&
             primitiveUnderestimation == rhs.primitiveUnderestimation &&
             conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization &&
             degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized && degenerateLinesRasterized == rhs.degenerateLinesRasterized &&
             fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable &&
             conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage;
#  endif
    }

    bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType                                       = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
    void *        pNext                                       = {};
    float         primitiveOverestimationSize                 = {};
    float         maxExtraPrimitiveOverestimationSize         = {};
    float         extraPrimitiveOverestimationSizeGranularity = {};
    Bool32        primitiveUnderestimation                    = {};
    Bool32        conservativePointAndLineRasterization       = {};
    Bool32        degenerateTrianglesRasterized               = {};
    Bool32        degenerateLinesRasterized                   = {};
    Bool32        fullyCoveredFragmentShaderInputVariable     = {};
    Bool32        conservativeRasterizationPostDepthCoverage  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait: maps the C struct VkPhysicalDeviceConservativeRasterizationPropertiesEXT to its C++ wrapper (C++20 and up).
  template <>
  struct CppType<VkPhysicalDeviceConservativeRasterizationPropertiesEXT>
  {
    using Type = PhysicalDeviceConservativeRasterizationPropertiesEXT;
  };
#endif

  // Trait: maps StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT>
  {
    using Type = PhysicalDeviceConservativeRasterizationPropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceCooperativeMatrix2FeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrix2FeaturesNV.html
  struct PhysicalDeviceCooperativeMatrix2FeaturesNV
  {
    using NativeType = VkPhysicalDeviceCooperativeMatrix2FeaturesNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceCooperativeMatrix2FeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: every feature flag defaults to VK_FALSE, pNext to nullptr.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2FeaturesNV( Bool32 cooperativeMatrixWorkgroupScope_       = {},
                                                                     Bool32 cooperativeMatrixFlexibleDimensions_   = {},
                                                                     Bool32 cooperativeMatrixReductions_           = {},
                                                                     Bool32 cooperativeMatrixConversions_          = {},
                                                                     Bool32 cooperativeMatrixPerElementOperations_ = {},
                                                                     Bool32 cooperativeMatrixTensorAddressing_     = {},
                                                                     Bool32 cooperativeMatrixBlockLoads_           = {},
                                                                     void * pNext_                                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , cooperativeMatrixWorkgroupScope( cooperativeMatrixWorkgroupScope_ )
      , cooperativeMatrixFlexibleDimensions( cooperativeMatrixFlexibleDimensions_ )
      , cooperativeMatrixReductions( cooperativeMatrixReductions_ )
      , cooperativeMatrixConversions( cooperativeMatrixConversions_ )
      , cooperativeMatrixPerElementOperations( cooperativeMatrixPerElementOperations_ )
      , cooperativeMatrixTensorAddressing( cooperativeMatrixTensorAddressing_ )
      , cooperativeMatrixBlockLoads( cooperativeMatrixBlockLoads_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2FeaturesNV( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent native struct; wrapper and native type share one layout.
    PhysicalDeviceCooperativeMatrix2FeaturesNV( VkPhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCooperativeMatrix2FeaturesNV( *reinterpret_cast<PhysicalDeviceCooperativeMatrix2FeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceCooperativeMatrix2FeaturesNV & operator=( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native struct through the layout-compatible wrapper view.
    PhysicalDeviceCooperativeMatrix2FeaturesNV & operator=( VkPhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      return *this = *reinterpret_cast<PhysicalDeviceCooperativeMatrix2FeaturesNV const *>( &rhs );
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV &
      setCooperativeMatrixWorkgroupScope( Bool32 cooperativeMatrixWorkgroupScope_ ) VULKAN_HPP_NOEXCEPT
    {
      this->cooperativeMatrixWorkgroupScope = cooperativeMatrixWorkgroupScope_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV &
      setCooperativeMatrixFlexibleDimensions( Bool32 cooperativeMatrixFlexibleDimensions_ ) VULKAN_HPP_NOEXCEPT
    {
      this->cooperativeMatrixFlexibleDimensions = cooperativeMatrixFlexibleDimensions_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV &
      setCooperativeMatrixReductions( Bool32 cooperativeMatrixReductions_ ) VULKAN_HPP_NOEXCEPT
    {
      this->cooperativeMatrixReductions = cooperativeMatrixReductions_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV &
      setCooperativeMatrixConversions( Bool32 cooperativeMatrixConversions_ ) VULKAN_HPP_NOEXCEPT
    {
      this->cooperativeMatrixConversions = cooperativeMatrixConversions_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV &
      setCooperativeMatrixPerElementOperations( Bool32 cooperativeMatrixPerElementOperations_ ) VULKAN_HPP_NOEXCEPT
    {
      this->cooperativeMatrixPerElementOperations = cooperativeMatrixPerElementOperations_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV &
      setCooperativeMatrixTensorAddressing( Bool32 cooperativeMatrixTensorAddressing_ ) VULKAN_HPP_NOEXCEPT
    {
      this->cooperativeMatrixTensorAddressing = cooperativeMatrixTensorAddressing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV &
      setCooperativeMatrixBlockLoads( Bool32 cooperativeMatrixBlockLoads_ ) VULKAN_HPP_NOEXCEPT
    {
      this->cooperativeMatrixBlockLoads = cooperativeMatrixBlockLoads_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to the native struct and pointer types.
    operator VkPhysicalDeviceCooperativeMatrix2FeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrix2FeaturesNV const *>( this );
    }

    operator VkPhysicalDeviceCooperativeMatrix2FeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrix2FeaturesNV *>( this );
    }

    operator VkPhysicalDeviceCooperativeMatrix2FeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceCooperativeMatrix2FeaturesNV const *>( this );
    }

    operator VkPhysicalDeviceCooperativeMatrix2FeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceCooperativeMatrix2FeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Member-wise reference view used for comparison and hashing.
    std::tuple<StructureType const &,
               void * const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, cooperativeMatrixWorkgroupScope, cooperativeMatrixFlexibleDimensions, cooperativeMatrixReductions,
                       cooperativeMatrixConversions, cooperativeMatrixPerElementOperations, cooperativeMatrixTensorAddressing, cooperativeMatrixBlockLoads );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCooperativeMatrix2FeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( cooperativeMatrixWorkgroupScope == rhs.cooperativeMatrixWorkgroupScope ) &&
             ( cooperativeMatrixFlexibleDimensions == rhs.cooperativeMatrixFlexibleDimensions ) &&
             ( cooperativeMatrixReductions == rhs.cooperativeMatrixReductions ) &&
             ( cooperativeMatrixConversions == rhs.cooperativeMatrixConversions ) &&
             ( cooperativeMatrixPerElementOperations == rhs.cooperativeMatrixPerElementOperations ) &&
             ( cooperativeMatrixTensorAddressing == rhs.cooperativeMatrixTensorAddressing ) &&
             ( cooperativeMatrixBlockLoads == rhs.cooperativeMatrixBlockLoads );
#  endif
    }

    bool operator!=( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType                                 = StructureType::ePhysicalDeviceCooperativeMatrix2FeaturesNV;
    void *        pNext                                 = {};
    Bool32        cooperativeMatrixWorkgroupScope       = {};
    Bool32        cooperativeMatrixFlexibleDimensions   = {};
    Bool32        cooperativeMatrixReductions           = {};
    Bool32        cooperativeMatrixConversions          = {};
    Bool32        cooperativeMatrixPerElementOperations = {};
    Bool32        cooperativeMatrixTensorAddressing     = {};
    Bool32        cooperativeMatrixBlockLoads           = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (trait only available with C++20 and newer).
  template <>
  struct CppType<VkPhysicalDeviceCooperativeMatrix2FeaturesNV>
  {
    using Type = PhysicalDeviceCooperativeMatrix2FeaturesNV;
  };
#endif

  // Maps the sType enumerant back to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrix2FeaturesNV>
  {
    using Type = PhysicalDeviceCooperativeMatrix2FeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceCooperativeMatrix2PropertiesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrix2PropertiesNV.html
  struct PhysicalDeviceCooperativeMatrix2PropertiesNV
  {
    using NativeType = VkPhysicalDeviceCooperativeMatrix2PropertiesNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceCooperativeMatrix2PropertiesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2PropertiesNV( uint32_t cooperativeMatrixWorkgroupScopeMaxWorkgroupSize_     = {},
                                                                       uint32_t cooperativeMatrixFlexibleDimensionsMaxDimension_     = {},
                                                                       uint32_t cooperativeMatrixWorkgroupScopeReservedSharedMemory_ = {},
                                                                       void *   pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , cooperativeMatrixWorkgroupScopeMaxWorkgroupSize{ cooperativeMatrixWorkgroupScopeMaxWorkgroupSize_ }
      , cooperativeMatrixFlexibleDimensionsMaxDimension{ cooperativeMatrixFlexibleDimensionsMaxDimension_ }
      , cooperativeMatrixWorkgroupScopeReservedSharedMemory{ cooperativeMatrixWorkgroupScopeReservedSharedMemory_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2PropertiesNV( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCooperativeMatrix2PropertiesNV( VkPhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCooperativeMatrix2PropertiesNV( *reinterpret_cast<PhysicalDeviceCooperativeMatrix2PropertiesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceCooperativeMatrix2PropertiesNV & operator=( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceCooperativeMatrix2PropertiesNV & operator=( VkPhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceCooperativeMatrix2PropertiesNV const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceCooperativeMatrix2PropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrix2PropertiesNV *>( this );
    }

    operator VkPhysicalDeviceCooperativeMatrix2PropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrix2PropertiesNV *>( this );
    }

    operator VkPhysicalDeviceCooperativeMatrix2PropertiesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceCooperativeMatrix2PropertiesNV *>( this );
    }

    operator VkPhysicalDeviceCooperativeMatrix2PropertiesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceCooperativeMatrix2PropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       cooperativeMatrixWorkgroupScopeMaxWorkgroupSize,
                       cooperativeMatrixFlexibleDimensionsMaxDimension,
                       cooperativeMatrixWorkgroupScopeReservedSharedMemory );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCooperativeMatrix2PropertiesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( cooperativeMatrixWorkgroupScopeMaxWorkgroupSize == rhs.cooperativeMatrixWorkgroupScopeMaxWorkgroupSize ) &&
             ( cooperativeMatrixFlexibleDimensionsMaxDimension == rhs.cooperativeMatrixFlexibleDimensionsMaxDimension ) &&
             ( cooperativeMatrixWorkgroupScopeReservedSharedMemory == rhs.cooperativeMatrixWorkgroupScopeReservedSharedMemory );
#  endif
    }

    bool operator!=( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                                               = StructureType::ePhysicalDeviceCooperativeMatrix2PropertiesNV;
    void *        pNext                                               = {};
    uint32_t      cooperativeMatrixWorkgroupScopeMaxWorkgroupSize     = {};
    uint32_t      cooperativeMatrixFlexibleDimensionsMaxDimension     = {};
    uint32_t      cooperativeMatrixWorkgroupScopeReservedSharedMemory = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (trait only available with C++20 and newer).
  template <>
  struct CppType<VkPhysicalDeviceCooperativeMatrix2PropertiesNV>
  {
    using Type = PhysicalDeviceCooperativeMatrix2PropertiesNV;
  };
#endif

  // Maps the sType enumerant back to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrix2PropertiesNV>
  {
    using Type = PhysicalDeviceCooperativeMatrix2PropertiesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceCooperativeMatrixFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrixFeaturesKHR.html
  struct PhysicalDeviceCooperativeMatrixFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceCooperativeMatrixFeaturesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesKHR( Bool32 cooperativeMatrix_                   = {},
                                                                     Bool32 cooperativeMatrixRobustBufferAccess_ = {},
                                                                     void * pNext_                               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , cooperativeMatrix{ cooperativeMatrix_ }
      , cooperativeMatrixRobustBufferAccess{ cooperativeMatrixRobustBufferAccess_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesKHR( PhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCooperativeMatrixFeaturesKHR( VkPhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCooperativeMatrixFeaturesKHR( *reinterpret_cast<PhysicalDeviceCooperativeMatrixFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceCooperativeMatrixFeaturesKHR & operator=( PhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceCooperativeMatrixFeaturesKHR & operator=( VkPhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceCooperativeMatrixFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesKHR & setCooperativeMatrix( Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT
    {
      cooperativeMatrix = cooperativeMatrix_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesKHR &
      setCooperativeMatrixRobustBufferAccess( Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPhysicalDeviceCooperativeMatrixFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceCooperativeMatrixFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceCooperativeMatrixFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceCooperativeMatrixFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceCooperativeMatrixFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, cooperativeMatrix, cooperativeMatrixRobustBufferAccess );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrix == rhs.cooperativeMatrix ) &&
             ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess );
#  endif
    }

    bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                               = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesKHR;
    void *        pNext                               = {};
    Bool32        cooperativeMatrix                   = {};
    Bool32        cooperativeMatrixRobustBufferAccess = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (trait only available with C++20 and newer).
  template <>
  struct CppType<VkPhysicalDeviceCooperativeMatrixFeaturesKHR>
  {
    using Type = PhysicalDeviceCooperativeMatrixFeaturesKHR;
  };
#endif

  // Maps the sType enumerant back to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrixFeaturesKHR>
  {
    using Type = PhysicalDeviceCooperativeMatrixFeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceCooperativeMatrixFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrixFeaturesNV.html
  struct PhysicalDeviceCooperativeMatrixFeaturesNV
  {
    using NativeType = VkPhysicalDeviceCooperativeMatrixFeaturesNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( Bool32 cooperativeMatrix_                   = {},
                                                                    Bool32 cooperativeMatrixRobustBufferAccess_ = {},
                                                                    void * pNext_                               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , cooperativeMatrix{ cooperativeMatrix_ }
      , cooperativeMatrixRobustBufferAccess{ cooperativeMatrixRobustBufferAccess_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCooperativeMatrixFeaturesNV( *reinterpret_cast<PhysicalDeviceCooperativeMatrixFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceCooperativeMatrixFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrix( Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT
    {
      cooperativeMatrix = cooperativeMatrix_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV &
      setCooperativeMatrixRobustBufferAccess( Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceCooperativeMatrixFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceCooperativeMatrixFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceCooperativeMatrixFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, cooperativeMatrix, cooperativeMatrixRobustBufferAccess );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrix == rhs.cooperativeMatrix ) &&
             ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess );
#  endif
    }

    bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                               = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV;
    void *        pNext                               = {};
    Bool32        cooperativeMatrix                   = {};
    Bool32        cooperativeMatrixRobustBufferAccess = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (trait only available with C++20 and newer).
  template <>
  struct CppType<VkPhysicalDeviceCooperativeMatrixFeaturesNV>
  {
    using Type = PhysicalDeviceCooperativeMatrixFeaturesNV;
  };
#endif

  // Maps the sType enumerant back to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV>
  {
    using Type = PhysicalDeviceCooperativeMatrixFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceCooperativeMatrixPropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrixPropertiesKHR.html
  struct PhysicalDeviceCooperativeMatrixPropertiesKHR
  {
    using NativeType = VkPhysicalDeviceCooperativeMatrixPropertiesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: supported stages default to empty, pNext to nullptr. No setters
    // are generated for this struct, as it is filled in by the implementation.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesKHR( ShaderStageFlags cooperativeMatrixSupportedStages_ = {},
                                                                       void *           pNext_                            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesKHR( PhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent native struct; wrapper and native type share one layout.
    PhysicalDeviceCooperativeMatrixPropertiesKHR( VkPhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCooperativeMatrixPropertiesKHR( *reinterpret_cast<PhysicalDeviceCooperativeMatrixPropertiesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceCooperativeMatrixPropertiesKHR & operator=( PhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native struct through the layout-compatible wrapper view.
    PhysicalDeviceCooperativeMatrixPropertiesKHR & operator=( VkPhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      return *this = *reinterpret_cast<PhysicalDeviceCooperativeMatrixPropertiesKHR const *>( &rhs );
    }

    // Reinterpreting conversions to the native struct and pointer types.
    operator VkPhysicalDeviceCooperativeMatrixPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesKHR const *>( this );
    }

    operator VkPhysicalDeviceCooperativeMatrixPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceCooperativeMatrixPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesKHR const *>( this );
    }

    operator VkPhysicalDeviceCooperativeMatrixPropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Member-wise reference view used for comparison and hashing.
    std::tuple<StructureType const &, void * const &, ShaderStageFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, cooperativeMatrixSupportedStages );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages );
#  endif
    }

    bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType    sType                            = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesKHR;
    void *           pNext                            = {};
    ShaderStageFlags cooperativeMatrixSupportedStages = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (trait only available with C++20 and newer).
  template <>
  struct CppType<VkPhysicalDeviceCooperativeMatrixPropertiesKHR>
  {
    using Type = PhysicalDeviceCooperativeMatrixPropertiesKHR;
  };
#endif

  // Maps the sType enumerant back to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrixPropertiesKHR>
  {
    using Type = PhysicalDeviceCooperativeMatrixPropertiesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceCooperativeMatrixPropertiesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrixPropertiesNV.html
  struct PhysicalDeviceCooperativeMatrixPropertiesNV
  {
    using NativeType = VkPhysicalDeviceCooperativeMatrixPropertiesNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: supported stages default to empty, pNext to nullptr. No setters
    // are generated for this struct, as it is filled in by the implementation.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( ShaderStageFlags cooperativeMatrixSupportedStages_ = {},
                                                                      void *           pNext_                            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent native struct; wrapper and native type share one layout.
    PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCooperativeMatrixPropertiesNV( *reinterpret_cast<PhysicalDeviceCooperativeMatrixPropertiesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native struct through the layout-compatible wrapper view.
    PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      return *this = *reinterpret_cast<PhysicalDeviceCooperativeMatrixPropertiesNV const *>( &rhs );
    }

    // Reinterpreting conversions to the native struct and pointer types.
    operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesNV const *>( this );
    }

    operator VkPhysicalDeviceCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesNV const *>( this );
    }

    operator VkPhysicalDeviceCooperativeMatrixPropertiesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Member-wise reference view used for comparison and hashing.
    std::tuple<StructureType const &, void * const &, ShaderStageFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, cooperativeMatrixSupportedStages );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages );
#  endif
    }

    bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType    sType                            = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;
    void *           pNext                            = {};
    ShaderStageFlags cooperativeMatrixSupportedStages = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceCooperativeMatrixPropertiesNV>
  {
    using Type = PhysicalDeviceCooperativeMatrixPropertiesNV;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV>
  {
    using Type = PhysicalDeviceCooperativeMatrixPropertiesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceCooperativeVectorFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeVectorFeaturesNV.html
  struct PhysicalDeviceCooperativeVectorFeaturesNV
  {
    // The corresponding native C struct type.
    using NativeType = VkPhysicalDeviceCooperativeVectorFeaturesNV;

    // Compile-time chain metadata; structureType matches the sType initializer below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceCooperativeVectorFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all fields default to zero-initialized values.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeVectorFeaturesNV( Bool32 cooperativeVector_         = {},
                                                                    Bool32 cooperativeVectorTraining_ = {},
                                                                    void * pNext_                     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , cooperativeVector{ cooperativeVector_ }
      , cooperativeVectorTraining{ cooperativeVectorTraining_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeVectorFeaturesNV( PhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (assumes identical memory layout of wrapper and native type).
    PhysicalDeviceCooperativeVectorFeaturesNV( VkPhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCooperativeVectorFeaturesNV( *reinterpret_cast<PhysicalDeviceCooperativeVectorFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceCooperativeVectorFeaturesNV & operator=( PhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (assumes identical memory layout).
    PhysicalDeviceCooperativeVectorFeaturesNV & operator=( VkPhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceCooperativeVectorFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeVectorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeVectorFeaturesNV & setCooperativeVector( Bool32 cooperativeVector_ ) VULKAN_HPP_NOEXCEPT
    {
      cooperativeVector = cooperativeVector_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeVectorFeaturesNV & setCooperativeVectorTraining( Bool32 cooperativeVectorTraining_ ) VULKAN_HPP_NOEXCEPT
    {
      cooperativeVectorTraining = cooperativeVectorTraining_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to the native C type (reference and pointer forms).
    operator VkPhysicalDeviceCooperativeVectorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCooperativeVectorFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceCooperativeVectorFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCooperativeVectorFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceCooperativeVectorFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceCooperativeVectorFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceCooperativeVectorFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceCooperativeVectorFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; used below for generic comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, cooperativeVector, cooperativeVectorTraining );
    }
#endif

    // Comparison: defaulted spaceship when available, otherwise member-wise equality
    // (pNext is compared as a raw pointer, not by chain contents).
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCooperativeVectorFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeVector == rhs.cooperativeVector ) &&
             ( cooperativeVectorTraining == rhs.cooperativeVectorTraining );
#  endif
    }

    bool operator!=( PhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Data members; sType is preinitialized to structureType.
    StructureType sType                     = StructureType::ePhysicalDeviceCooperativeVectorFeaturesNV;
    void *        pNext                     = {};
    Bool32        cooperativeVector         = {};
    Bool32        cooperativeVectorTraining = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceCooperativeVectorFeaturesNV>
  {
    using Type = PhysicalDeviceCooperativeVectorFeaturesNV;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeVectorFeaturesNV>
  {
    using Type = PhysicalDeviceCooperativeVectorFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceCooperativeVectorPropertiesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeVectorPropertiesNV.html
  struct PhysicalDeviceCooperativeVectorPropertiesNV
  {
    // The corresponding native C struct type.
    using NativeType = VkPhysicalDeviceCooperativeVectorPropertiesNV;

    // Compile-time chain metadata; structureType matches the sType initializer below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceCooperativeVectorPropertiesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all fields default to zero-initialized values.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeVectorPropertiesNV( ShaderStageFlags cooperativeVectorSupportedStages_             = {},
                                                                      Bool32           cooperativeVectorTrainingFloat16Accumulation_ = {},
                                                                      Bool32           cooperativeVectorTrainingFloat32Accumulation_ = {},
                                                                      uint32_t         maxCooperativeVectorComponents_               = {},
                                                                      void *           pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , cooperativeVectorSupportedStages{ cooperativeVectorSupportedStages_ }
      , cooperativeVectorTrainingFloat16Accumulation{ cooperativeVectorTrainingFloat16Accumulation_ }
      , cooperativeVectorTrainingFloat32Accumulation{ cooperativeVectorTrainingFloat32Accumulation_ }
      , maxCooperativeVectorComponents{ maxCooperativeVectorComponents_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeVectorPropertiesNV( PhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (assumes identical memory layout of wrapper and native type).
    PhysicalDeviceCooperativeVectorPropertiesNV( VkPhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCooperativeVectorPropertiesNV( *reinterpret_cast<PhysicalDeviceCooperativeVectorPropertiesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceCooperativeVectorPropertiesNV & operator=( PhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (assumes identical memory layout).
    // Note: this properties struct is read-only from the application's view, so no setters are generated.
    PhysicalDeviceCooperativeVectorPropertiesNV & operator=( VkPhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceCooperativeVectorPropertiesNV const *>( &rhs );
      return *this;
    }

    // Reinterpreting conversions to the native C type (reference and pointer forms).
    operator VkPhysicalDeviceCooperativeVectorPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCooperativeVectorPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceCooperativeVectorPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCooperativeVectorPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceCooperativeVectorPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceCooperativeVectorPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceCooperativeVectorPropertiesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceCooperativeVectorPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; used below for generic comparison.
    std::tuple<StructureType const &, void * const &, ShaderStageFlags const &, Bool32 const &, Bool32 const &, uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       cooperativeVectorSupportedStages,
                       cooperativeVectorTrainingFloat16Accumulation,
                       cooperativeVectorTrainingFloat32Accumulation,
                       maxCooperativeVectorComponents );
    }
#endif

    // Comparison: defaulted spaceship when available, otherwise member-wise equality
    // (pNext is compared as a raw pointer, not by chain contents).
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCooperativeVectorPropertiesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeVectorSupportedStages == rhs.cooperativeVectorSupportedStages ) &&
             ( cooperativeVectorTrainingFloat16Accumulation == rhs.cooperativeVectorTrainingFloat16Accumulation ) &&
             ( cooperativeVectorTrainingFloat32Accumulation == rhs.cooperativeVectorTrainingFloat32Accumulation ) &&
             ( maxCooperativeVectorComponents == rhs.maxCooperativeVectorComponents );
#  endif
    }

    bool operator!=( PhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Data members; sType is preinitialized to structureType.
    StructureType    sType                                        = StructureType::ePhysicalDeviceCooperativeVectorPropertiesNV;
    void *           pNext                                        = {};
    ShaderStageFlags cooperativeVectorSupportedStages             = {};
    Bool32           cooperativeVectorTrainingFloat16Accumulation = {};
    Bool32           cooperativeVectorTrainingFloat32Accumulation = {};
    uint32_t         maxCooperativeVectorComponents               = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceCooperativeVectorPropertiesNV>
  {
    using Type = PhysicalDeviceCooperativeVectorPropertiesNV;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeVectorPropertiesNV>
  {
    using Type = PhysicalDeviceCooperativeVectorPropertiesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceCopyMemoryIndirectFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCopyMemoryIndirectFeaturesKHR.html
  struct PhysicalDeviceCopyMemoryIndirectFeaturesKHR
  {
    // The corresponding native C struct type.
    using NativeType = VkPhysicalDeviceCopyMemoryIndirectFeaturesKHR;

    // Compile-time chain metadata; structureType matches the sType initializer below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all fields default to zero-initialized values.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectFeaturesKHR( Bool32 indirectMemoryCopy_        = {},
                                                                      Bool32 indirectMemoryToImageCopy_ = {},
                                                                      void * pNext_                     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , indirectMemoryCopy{ indirectMemoryCopy_ }
      , indirectMemoryToImageCopy{ indirectMemoryToImageCopy_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectFeaturesKHR( PhysicalDeviceCopyMemoryIndirectFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (assumes identical memory layout of wrapper and native type).
    PhysicalDeviceCopyMemoryIndirectFeaturesKHR( VkPhysicalDeviceCopyMemoryIndirectFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCopyMemoryIndirectFeaturesKHR( *reinterpret_cast<PhysicalDeviceCopyMemoryIndirectFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceCopyMemoryIndirectFeaturesKHR & operator=( PhysicalDeviceCopyMemoryIndirectFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (assumes identical memory layout).
    PhysicalDeviceCopyMemoryIndirectFeaturesKHR & operator=( VkPhysicalDeviceCopyMemoryIndirectFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceCopyMemoryIndirectFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesKHR & setIndirectMemoryCopy( Bool32 indirectMemoryCopy_ ) VULKAN_HPP_NOEXCEPT
    {
      indirectMemoryCopy = indirectMemoryCopy_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesKHR & setIndirectMemoryToImageCopy( Bool32 indirectMemoryToImageCopy_ ) VULKAN_HPP_NOEXCEPT
    {
      indirectMemoryToImageCopy = indirectMemoryToImageCopy_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to the native C type (reference and pointer forms).
    operator VkPhysicalDeviceCopyMemoryIndirectFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCopyMemoryIndirectFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceCopyMemoryIndirectFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCopyMemoryIndirectFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceCopyMemoryIndirectFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceCopyMemoryIndirectFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceCopyMemoryIndirectFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceCopyMemoryIndirectFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; used below for generic comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, indirectMemoryCopy, indirectMemoryToImageCopy );
    }
#endif

    // Comparison: defaulted spaceship when available, otherwise member-wise equality
    // (pNext is compared as a raw pointer, not by chain contents).
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCopyMemoryIndirectFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceCopyMemoryIndirectFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indirectMemoryCopy == rhs.indirectMemoryCopy ) &&
             ( indirectMemoryToImageCopy == rhs.indirectMemoryToImageCopy );
#  endif
    }

    bool operator!=( PhysicalDeviceCopyMemoryIndirectFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Data members; sType is preinitialized to structureType.
    StructureType sType                     = StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesKHR;
    void *        pNext                     = {};
    Bool32        indirectMemoryCopy        = {};
    Bool32        indirectMemoryToImageCopy = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceCopyMemoryIndirectFeaturesKHR>
  {
    using Type = PhysicalDeviceCopyMemoryIndirectFeaturesKHR;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesKHR>
  {
    using Type = PhysicalDeviceCopyMemoryIndirectFeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceCopyMemoryIndirectFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCopyMemoryIndirectFeaturesNV.html
  struct PhysicalDeviceCopyMemoryIndirectFeaturesNV
  {
    // The corresponding native C struct type.
    using NativeType = VkPhysicalDeviceCopyMemoryIndirectFeaturesNV;

    // Compile-time chain metadata; structureType matches the sType initializer below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all fields default to zero-initialized values.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectFeaturesNV( Bool32 indirectCopy_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , indirectCopy{ indirectCopy_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectFeaturesNV( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (assumes identical memory layout of wrapper and native type).
    PhysicalDeviceCopyMemoryIndirectFeaturesNV( VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCopyMemoryIndirectFeaturesNV( *reinterpret_cast<PhysicalDeviceCopyMemoryIndirectFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceCopyMemoryIndirectFeaturesNV & operator=( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (assumes identical memory layout).
    PhysicalDeviceCopyMemoryIndirectFeaturesNV & operator=( VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceCopyMemoryIndirectFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesNV & setIndirectCopy( Bool32 indirectCopy_ ) VULKAN_HPP_NOEXCEPT
    {
      indirectCopy = indirectCopy_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to the native C type (reference and pointer forms).
    operator VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCopyMemoryIndirectFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceCopyMemoryIndirectFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCopyMemoryIndirectFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceCopyMemoryIndirectFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceCopyMemoryIndirectFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceCopyMemoryIndirectFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; used below for generic comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, indirectCopy );
    }
#endif

    // Comparison: defaulted spaceship when available, otherwise member-wise equality
    // (pNext is compared as a raw pointer, not by chain contents).
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indirectCopy == rhs.indirectCopy );
#  endif
    }

    bool operator!=( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Data members; sType is preinitialized to structureType.
    StructureType sType        = StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV;
    void *        pNext        = {};
    Bool32        indirectCopy = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceCopyMemoryIndirectFeaturesNV>
  {
    using Type = PhysicalDeviceCopyMemoryIndirectFeaturesNV;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV>
  {
    using Type = PhysicalDeviceCopyMemoryIndirectFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceCopyMemoryIndirectPropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCopyMemoryIndirectPropertiesKHR.html
  struct PhysicalDeviceCopyMemoryIndirectPropertiesKHR
  {
    // The corresponding native C struct type.
    using NativeType = VkPhysicalDeviceCopyMemoryIndirectPropertiesKHR;

    // Compile-time chain metadata; structureType matches the sType initializer below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all fields default to zero-initialized values.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectPropertiesKHR( QueueFlags supportedQueues_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , supportedQueues{ supportedQueues_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceCopyMemoryIndirectPropertiesKHR( PhysicalDeviceCopyMemoryIndirectPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (assumes identical memory layout of wrapper and native type).
    PhysicalDeviceCopyMemoryIndirectPropertiesKHR( VkPhysicalDeviceCopyMemoryIndirectPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCopyMemoryIndirectPropertiesKHR( *reinterpret_cast<PhysicalDeviceCopyMemoryIndirectPropertiesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceCopyMemoryIndirectPropertiesKHR & operator=( PhysicalDeviceCopyMemoryIndirectPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (assumes identical memory layout).
    // Note: this properties struct is read-only from the application's view, so no setters are generated.
    PhysicalDeviceCopyMemoryIndirectPropertiesKHR & operator=( VkPhysicalDeviceCopyMemoryIndirectPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceCopyMemoryIndirectPropertiesKHR const *>( &rhs );
      return *this;
    }

    // Reinterpreting conversions to the native C type (reference and pointer forms).
    operator VkPhysicalDeviceCopyMemoryIndirectPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCopyMemoryIndirectPropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceCopyMemoryIndirectPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCopyMemoryIndirectPropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceCopyMemoryIndirectPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceCopyMemoryIndirectPropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceCopyMemoryIndirectPropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceCopyMemoryIndirectPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; used below for generic comparison.
    std::tuple<StructureType const &, void * const &, QueueFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, supportedQueues );
    }
#endif

    // Comparison: defaulted spaceship when available, otherwise member-wise equality
    // (pNext is compared as a raw pointer, not by chain contents).
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCopyMemoryIndirectPropertiesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceCopyMemoryIndirectPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedQueues == rhs.supportedQueues );
#  endif
    }

    bool operator!=( PhysicalDeviceCopyMemoryIndirectPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Data members; sType is preinitialized to structureType.
    StructureType sType           = StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesKHR;
    void *        pNext           = {};
    QueueFlags    supportedQueues = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceCopyMemoryIndirectPropertiesKHR>
  {
    using Type = PhysicalDeviceCopyMemoryIndirectPropertiesKHR;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesKHR>
  {
    using Type = PhysicalDeviceCopyMemoryIndirectPropertiesKHR;
  };

  // NV-suffixed alias of the KHR struct above.
  using PhysicalDeviceCopyMemoryIndirectPropertiesNV = PhysicalDeviceCopyMemoryIndirectPropertiesKHR;

  // wrapper struct for struct VkPhysicalDeviceCornerSampledImageFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCornerSampledImageFeaturesNV.html
  struct PhysicalDeviceCornerSampledImageFeaturesNV
  {
    // The corresponding native C struct type.
    using NativeType = VkPhysicalDeviceCornerSampledImageFeaturesNV;

    // Compile-time chain metadata; structureType matches the sType initializer below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all fields default to zero-initialized values.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( Bool32 cornerSampledImage_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , cornerSampledImage{ cornerSampledImage_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (assumes identical memory layout of wrapper and native type).
    PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCornerSampledImageFeaturesNV( *reinterpret_cast<PhysicalDeviceCornerSampledImageFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceCornerSampledImageFeaturesNV & operator=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (assumes identical memory layout).
    PhysicalDeviceCornerSampledImageFeaturesNV & operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceCornerSampledImageFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV & setCornerSampledImage( Bool32 cornerSampledImage_ ) VULKAN_HPP_NOEXCEPT
    {
      cornerSampledImage = cornerSampledImage_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to the native C type (reference and pointer forms).
    operator VkPhysicalDeviceCornerSampledImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCornerSampledImageFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceCornerSampledImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCornerSampledImageFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceCornerSampledImageFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceCornerSampledImageFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceCornerSampledImageFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceCornerSampledImageFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; used below for generic comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, cornerSampledImage );
    }
#endif

    // Comparison: defaulted spaceship when available, otherwise member-wise equality
    // (pNext is compared as a raw pointer, not by chain contents).
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCornerSampledImageFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cornerSampledImage == rhs.cornerSampledImage );
#  endif
    }

    bool operator!=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Data members; sType is preinitialized to structureType.
    StructureType sType              = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;
    void *        pNext              = {};
    Bool32        cornerSampledImage = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceCornerSampledImageFeaturesNV>
  {
    using Type = PhysicalDeviceCornerSampledImageFeaturesNV;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV>
  {
    using Type = PhysicalDeviceCornerSampledImageFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceCoverageReductionModeFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCoverageReductionModeFeaturesNV.html
  struct PhysicalDeviceCoverageReductionModeFeaturesNV
  {
    // Underlying C type; identical memory layout enables the reinterpret_cast conversions below.
    using NativeType = VkPhysicalDeviceCoverageReductionModeFeaturesNV;

    // At most one instance of this struct may appear in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( Bool32 coverageReductionMode_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , coverageReductionMode{ coverageReductionMode_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceCoverageReductionModeFeaturesNV( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; safe because both types share the same layout.
    PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCoverageReductionModeFeaturesNV( *reinterpret_cast<PhysicalDeviceCoverageReductionModeFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible).
    PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceCoverageReductionModeFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & setCoverageReductionMode( Bool32 coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
    {
      coverageReductionMode = coverageReductionMode_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type, so this wrapper can be passed directly to the C API.
    operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCoverageReductionModeFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceCoverageReductionModeFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCoverageReductionModeFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceCoverageReductionModeFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceCoverageReductionModeFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceCoverageReductionModeFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: exposes all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, coverageReductionMode );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCoverageReductionModeFeaturesNV const & ) const = default;
#else
    // Member-wise equality (via reflect() when reflection is enabled).
    bool operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( coverageReductionMode == rhs.coverageReductionMode );
#  endif
    }

    bool operator!=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // NOTE: sType must keep its default value for the struct to be recognized in a pNext chain.
    StructureType sType                 = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;
    void *        pNext                 = {};
    Bool32        coverageReductionMode = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct to its C++ wrapper type (C++20 only).
  template <>
  struct CppType<VkPhysicalDeviceCoverageReductionModeFeaturesNV>
  {
    using Type = PhysicalDeviceCoverageReductionModeFeaturesNV;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct (used for pNext-chain introspection).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV>
  {
    using Type = PhysicalDeviceCoverageReductionModeFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceCubicClampFeaturesQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCubicClampFeaturesQCOM.html
  struct PhysicalDeviceCubicClampFeaturesQCOM
  {
    // Underlying C type; identical memory layout enables the reinterpret_cast conversions below.
    using NativeType = VkPhysicalDeviceCubicClampFeaturesQCOM;

    // At most one instance of this struct may appear in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceCubicClampFeaturesQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicClampFeaturesQCOM( Bool32 cubicRangeClamp_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , cubicRangeClamp{ cubicRangeClamp_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicClampFeaturesQCOM( PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; safe because both types share the same layout.
    PhysicalDeviceCubicClampFeaturesQCOM( VkPhysicalDeviceCubicClampFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCubicClampFeaturesQCOM( *reinterpret_cast<PhysicalDeviceCubicClampFeaturesQCOM const *>( &rhs ) )
    {
    }

    PhysicalDeviceCubicClampFeaturesQCOM & operator=( PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible).
    PhysicalDeviceCubicClampFeaturesQCOM & operator=( VkPhysicalDeviceCubicClampFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceCubicClampFeaturesQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicClampFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicClampFeaturesQCOM & setCubicRangeClamp( Bool32 cubicRangeClamp_ ) VULKAN_HPP_NOEXCEPT
    {
      cubicRangeClamp = cubicRangeClamp_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type, so this wrapper can be passed directly to the C API.
    operator VkPhysicalDeviceCubicClampFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCubicClampFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceCubicClampFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCubicClampFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceCubicClampFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceCubicClampFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceCubicClampFeaturesQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceCubicClampFeaturesQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: exposes all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, cubicRangeClamp );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCubicClampFeaturesQCOM const & ) const = default;
#else
    // Member-wise equality (via reflect() when reflection is enabled).
    bool operator==( PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cubicRangeClamp == rhs.cubicRangeClamp );
#  endif
    }

    bool operator!=( PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // NOTE: sType must keep its default value for the struct to be recognized in a pNext chain.
    StructureType sType           = StructureType::ePhysicalDeviceCubicClampFeaturesQCOM;
    void *        pNext           = {};
    Bool32        cubicRangeClamp = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct to its C++ wrapper type (C++20 only).
  template <>
  struct CppType<VkPhysicalDeviceCubicClampFeaturesQCOM>
  {
    using Type = PhysicalDeviceCubicClampFeaturesQCOM;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct (used for pNext-chain introspection).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCubicClampFeaturesQCOM>
  {
    using Type = PhysicalDeviceCubicClampFeaturesQCOM;
  };

  // wrapper struct for struct VkPhysicalDeviceCubicWeightsFeaturesQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCubicWeightsFeaturesQCOM.html
  struct PhysicalDeviceCubicWeightsFeaturesQCOM
  {
    // Underlying C type; identical memory layout enables the reinterpret_cast conversions below.
    using NativeType = VkPhysicalDeviceCubicWeightsFeaturesQCOM;

    // At most one instance of this struct may appear in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceCubicWeightsFeaturesQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicWeightsFeaturesQCOM( Bool32 selectableCubicWeights_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , selectableCubicWeights{ selectableCubicWeights_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicWeightsFeaturesQCOM( PhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; safe because both types share the same layout.
    PhysicalDeviceCubicWeightsFeaturesQCOM( VkPhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCubicWeightsFeaturesQCOM( *reinterpret_cast<PhysicalDeviceCubicWeightsFeaturesQCOM const *>( &rhs ) )
    {
    }

    PhysicalDeviceCubicWeightsFeaturesQCOM & operator=( PhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible).
    PhysicalDeviceCubicWeightsFeaturesQCOM & operator=( VkPhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceCubicWeightsFeaturesQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicWeightsFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicWeightsFeaturesQCOM & setSelectableCubicWeights( Bool32 selectableCubicWeights_ ) VULKAN_HPP_NOEXCEPT
    {
      selectableCubicWeights = selectableCubicWeights_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type, so this wrapper can be passed directly to the C API.
    operator VkPhysicalDeviceCubicWeightsFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCubicWeightsFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceCubicWeightsFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCubicWeightsFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceCubicWeightsFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceCubicWeightsFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceCubicWeightsFeaturesQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceCubicWeightsFeaturesQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: exposes all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, selectableCubicWeights );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCubicWeightsFeaturesQCOM const & ) const = default;
#else
    // Member-wise equality (via reflect() when reflection is enabled).
    bool operator==( PhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( selectableCubicWeights == rhs.selectableCubicWeights );
#  endif
    }

    bool operator!=( PhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // NOTE: sType must keep its default value for the struct to be recognized in a pNext chain.
    StructureType sType                  = StructureType::ePhysicalDeviceCubicWeightsFeaturesQCOM;
    void *        pNext                  = {};
    Bool32        selectableCubicWeights = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct to its C++ wrapper type (C++20 only).
  template <>
  struct CppType<VkPhysicalDeviceCubicWeightsFeaturesQCOM>
  {
    using Type = PhysicalDeviceCubicWeightsFeaturesQCOM;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct (used for pNext-chain introspection).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCubicWeightsFeaturesQCOM>
  {
    using Type = PhysicalDeviceCubicWeightsFeaturesQCOM;
  };

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  // wrapper struct for struct VkPhysicalDeviceCudaKernelLaunchFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCudaKernelLaunchFeaturesNV.html
  struct PhysicalDeviceCudaKernelLaunchFeaturesNV
  {
    // Underlying C type; identical memory layout enables the reinterpret_cast conversions below.
    using NativeType = VkPhysicalDeviceCudaKernelLaunchFeaturesNV;

    // At most one instance of this struct may appear in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceCudaKernelLaunchFeaturesNV;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchFeaturesNV( Bool32 cudaKernelLaunchFeatures_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , cudaKernelLaunchFeatures{ cudaKernelLaunchFeatures_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchFeaturesNV( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; safe because both types share the same layout.
    PhysicalDeviceCudaKernelLaunchFeaturesNV( VkPhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCudaKernelLaunchFeaturesNV( *reinterpret_cast<PhysicalDeviceCudaKernelLaunchFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceCudaKernelLaunchFeaturesNV & operator=( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible).
    PhysicalDeviceCudaKernelLaunchFeaturesNV & operator=( VkPhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceCudaKernelLaunchFeaturesNV const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCudaKernelLaunchFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCudaKernelLaunchFeaturesNV & setCudaKernelLaunchFeatures( Bool32 cudaKernelLaunchFeatures_ ) VULKAN_HPP_NOEXCEPT
    {
      cudaKernelLaunchFeatures = cudaKernelLaunchFeatures_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type, so this wrapper can be passed directly to the C API.
    operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCudaKernelLaunchFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCudaKernelLaunchFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceCudaKernelLaunchFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceCudaKernelLaunchFeaturesNV *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: exposes all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, cudaKernelLaunchFeatures );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCudaKernelLaunchFeaturesNV const & ) const = default;
#  else
    // Member-wise equality (via reflect() when reflection is enabled).
    bool operator==( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cudaKernelLaunchFeatures == rhs.cudaKernelLaunchFeatures );
#    endif
    }

    bool operator!=( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // NOTE: sType must keep its default value for the struct to be recognized in a pNext chain.
    StructureType sType                    = StructureType::ePhysicalDeviceCudaKernelLaunchFeaturesNV;
    void *        pNext                    = {};
    Bool32        cudaKernelLaunchFeatures = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct to its C++ wrapper type (C++20 only).
  template <>
  struct CppType<VkPhysicalDeviceCudaKernelLaunchFeaturesNV>
  {
    using Type = PhysicalDeviceCudaKernelLaunchFeaturesNV;
  };
#  endif

  // Maps the StructureType enumerant back to the wrapper struct (used for pNext-chain introspection).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCudaKernelLaunchFeaturesNV>
  {
    using Type = PhysicalDeviceCudaKernelLaunchFeaturesNV;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  // wrapper struct for struct VkPhysicalDeviceCudaKernelLaunchPropertiesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCudaKernelLaunchPropertiesNV.html
  struct PhysicalDeviceCudaKernelLaunchPropertiesNV
  {
    // Underlying C type; identical memory layout enables the reinterpret_cast conversions below.
    using NativeType = VkPhysicalDeviceCudaKernelLaunchPropertiesNV;

    // At most one instance of this struct may appear in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceCudaKernelLaunchPropertiesNV;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchPropertiesNV( uint32_t computeCapabilityMinor_ = {},
                                                                     uint32_t computeCapabilityMajor_ = {},
                                                                     void *   pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , computeCapabilityMinor{ computeCapabilityMinor_ }
      , computeCapabilityMajor{ computeCapabilityMajor_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchPropertiesNV( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; safe because both types share the same layout.
    PhysicalDeviceCudaKernelLaunchPropertiesNV( VkPhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCudaKernelLaunchPropertiesNV( *reinterpret_cast<PhysicalDeviceCudaKernelLaunchPropertiesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceCudaKernelLaunchPropertiesNV & operator=( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible).
    // NOTE: this is a read-only properties struct, so no member setters are generated.
    PhysicalDeviceCudaKernelLaunchPropertiesNV & operator=( VkPhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceCudaKernelLaunchPropertiesNV const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C type, so this wrapper can be passed directly to the C API.
    operator VkPhysicalDeviceCudaKernelLaunchPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCudaKernelLaunchPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceCudaKernelLaunchPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCudaKernelLaunchPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceCudaKernelLaunchPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceCudaKernelLaunchPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceCudaKernelLaunchPropertiesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceCudaKernelLaunchPropertiesNV *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: exposes all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &, void * const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, computeCapabilityMinor, computeCapabilityMajor );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCudaKernelLaunchPropertiesNV const & ) const = default;
#  else
    // Member-wise equality (via reflect() when reflection is enabled).
    bool operator==( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( computeCapabilityMinor == rhs.computeCapabilityMinor ) &&
             ( computeCapabilityMajor == rhs.computeCapabilityMajor );
#    endif
    }

    bool operator!=( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // NOTE: sType must keep its default value for the struct to be recognized in a pNext chain.
    StructureType sType                  = StructureType::ePhysicalDeviceCudaKernelLaunchPropertiesNV;
    void *        pNext                  = {};
    uint32_t      computeCapabilityMinor = {};
    uint32_t      computeCapabilityMajor = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct to its C++ wrapper type (C++20 only).
  template <>
  struct CppType<VkPhysicalDeviceCudaKernelLaunchPropertiesNV>
  {
    using Type = PhysicalDeviceCudaKernelLaunchPropertiesNV;
  };
#  endif

  // Maps the StructureType enumerant back to the wrapper struct (used for pNext-chain introspection).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCudaKernelLaunchPropertiesNV>
  {
    using Type = PhysicalDeviceCudaKernelLaunchPropertiesNV;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  // wrapper struct for struct VkPhysicalDeviceCustomBorderColorFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCustomBorderColorFeaturesEXT.html
  struct PhysicalDeviceCustomBorderColorFeaturesEXT
  {
    // Underlying C type; identical memory layout enables the reinterpret_cast conversions below.
    using NativeType = VkPhysicalDeviceCustomBorderColorFeaturesEXT;

    // At most one instance of this struct may appear in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( Bool32 customBorderColors_             = {},
                                                                     Bool32 customBorderColorWithoutFormat_ = {},
                                                                     void * pNext_                          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , customBorderColors{ customBorderColors_ }
      , customBorderColorWithoutFormat{ customBorderColorWithoutFormat_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; safe because both types share the same layout.
    PhysicalDeviceCustomBorderColorFeaturesEXT( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCustomBorderColorFeaturesEXT( *reinterpret_cast<PhysicalDeviceCustomBorderColorFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible).
    PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceCustomBorderColorFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColors( Bool32 customBorderColors_ ) VULKAN_HPP_NOEXCEPT
    {
      customBorderColors = customBorderColors_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT &
      setCustomBorderColorWithoutFormat( Bool32 customBorderColorWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      customBorderColorWithoutFormat = customBorderColorWithoutFormat_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type, so this wrapper can be passed directly to the C API.
    operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCustomBorderColorFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceCustomBorderColorFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCustomBorderColorFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceCustomBorderColorFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceCustomBorderColorFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceCustomBorderColorFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: exposes all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, customBorderColors, customBorderColorWithoutFormat );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCustomBorderColorFeaturesEXT const & ) const = default;
#else
    // Member-wise equality (via reflect() when reflection is enabled).
    bool operator==( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( customBorderColors == rhs.customBorderColors ) &&
             ( customBorderColorWithoutFormat == rhs.customBorderColorWithoutFormat );
#  endif
    }

    bool operator!=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // NOTE: sType must keep its default value for the struct to be recognized in a pNext chain.
    StructureType sType                          = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT;
    void *        pNext                          = {};
    Bool32        customBorderColors             = {};
    Bool32        customBorderColorWithoutFormat = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct to its C++ wrapper type (C++20 only).
  template <>
  struct CppType<VkPhysicalDeviceCustomBorderColorFeaturesEXT>
  {
    using Type = PhysicalDeviceCustomBorderColorFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct (used for pNext-chain introspection).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT>
  {
    using Type = PhysicalDeviceCustomBorderColorFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceCustomBorderColorPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCustomBorderColorPropertiesEXT.html
  struct PhysicalDeviceCustomBorderColorPropertiesEXT
  {
    // Underlying C type; identical memory layout enables the reinterpret_cast conversions below.
    using NativeType = VkPhysicalDeviceCustomBorderColorPropertiesEXT;

    // At most one instance of this struct may appear in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( uint32_t maxCustomBorderColorSamplers_ = {},
                                                                       void *   pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxCustomBorderColorSamplers{ maxCustomBorderColorSamplers_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; safe because both types share the same layout.
    PhysicalDeviceCustomBorderColorPropertiesEXT( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCustomBorderColorPropertiesEXT( *reinterpret_cast<PhysicalDeviceCustomBorderColorPropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible).
    // NOTE: this is a read-only properties struct, so no member setters are generated.
    PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceCustomBorderColorPropertiesEXT const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C type, so this wrapper can be passed directly to the C API.
    operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCustomBorderColorPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceCustomBorderColorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCustomBorderColorPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceCustomBorderColorPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceCustomBorderColorPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceCustomBorderColorPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: exposes all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &, void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxCustomBorderColorSamplers );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCustomBorderColorPropertiesEXT const & ) const = default;
#else
    // Member-wise equality (via reflect() when reflection is enabled).
    bool operator==( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxCustomBorderColorSamplers == rhs.maxCustomBorderColorSamplers );
#  endif
    }

    bool operator!=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // NOTE: sType must keep its default value for the struct to be recognized in a pNext chain.
    StructureType sType                        = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT;
    void *        pNext                        = {};
    uint32_t      maxCustomBorderColorSamplers = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct to its C++ wrapper type (C++20 only).
  template <>
  struct CppType<VkPhysicalDeviceCustomBorderColorPropertiesEXT>
  {
    using Type = PhysicalDeviceCustomBorderColorPropertiesEXT;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct (used for pNext-chain introspection).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT>
  {
    using Type = PhysicalDeviceCustomBorderColorPropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceCustomResolveFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCustomResolveFeaturesEXT.html
  struct PhysicalDeviceCustomResolveFeaturesEXT
  {
    // Underlying C type; identical memory layout enables the reinterpret_cast conversions below.
    using NativeType = VkPhysicalDeviceCustomResolveFeaturesEXT;

    // At most one instance of this struct may appear in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceCustomResolveFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomResolveFeaturesEXT( Bool32 customResolve_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , customResolve{ customResolve_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomResolveFeaturesEXT( PhysicalDeviceCustomResolveFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; safe because both types share the same layout.
    PhysicalDeviceCustomResolveFeaturesEXT( VkPhysicalDeviceCustomResolveFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCustomResolveFeaturesEXT( *reinterpret_cast<PhysicalDeviceCustomResolveFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceCustomResolveFeaturesEXT & operator=( PhysicalDeviceCustomResolveFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (layout-compatible).
    PhysicalDeviceCustomResolveFeaturesEXT & operator=( VkPhysicalDeviceCustomResolveFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceCustomResolveFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomResolveFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomResolveFeaturesEXT & setCustomResolve( Bool32 customResolve_ ) VULKAN_HPP_NOEXCEPT
    {
      customResolve = customResolve_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type, so this wrapper can be passed directly to the C API.
    operator VkPhysicalDeviceCustomResolveFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCustomResolveFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceCustomResolveFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCustomResolveFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceCustomResolveFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceCustomResolveFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceCustomResolveFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceCustomResolveFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: exposes all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, customResolve );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCustomResolveFeaturesEXT const & ) const = default;
#else
    // Member-wise equality (via reflect() when reflection is enabled).
    bool operator==( PhysicalDeviceCustomResolveFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( customResolve == rhs.customResolve );
#  endif
    }

    bool operator!=( PhysicalDeviceCustomResolveFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // NOTE: sType must keep its default value for the struct to be recognized in a pNext chain.
    StructureType sType         = StructureType::ePhysicalDeviceCustomResolveFeaturesEXT;
    void *        pNext         = {};
    Bool32        customResolve = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceCustomResolveFeaturesEXT>
  {
    using Type = PhysicalDeviceCustomResolveFeaturesEXT;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCustomResolveFeaturesEXT>
  {
    using Type = PhysicalDeviceCustomResolveFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceDataGraphFeaturesARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDataGraphFeaturesARM.html
  struct PhysicalDeviceDataGraphFeaturesARM
  {
    using NativeType = VkPhysicalDeviceDataGraphFeaturesARM;

    // Chaining metadata: the sType this struct reports, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDataGraphFeaturesARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all features default to false (VK_FALSE) and the pNext chain defaults to empty.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDataGraphFeaturesARM( Bool32 dataGraph_                        = {},
                                                             Bool32 dataGraphUpdateAfterBind_         = {},
                                                             Bool32 dataGraphSpecializationConstants_ = {},
                                                             Bool32 dataGraphDescriptorBuffer_        = {},
                                                             Bool32 dataGraphShaderModule_            = {},
                                                             void * pNext_                            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , dataGraph{ dataGraph_ }
      , dataGraphUpdateAfterBind{ dataGraphUpdateAfterBind_ }
      , dataGraphSpecializationConstants{ dataGraphSpecializationConstants_ }
      , dataGraphDescriptorBuffer{ dataGraphDescriptorBuffer_ }
      , dataGraphShaderModule{ dataGraphShaderModule_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDataGraphFeaturesARM( PhysicalDeviceDataGraphFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpreting it as this layout-compatible wrapper.
    PhysicalDeviceDataGraphFeaturesARM( VkPhysicalDeviceDataGraphFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDataGraphFeaturesARM( *reinterpret_cast<PhysicalDeviceDataGraphFeaturesARM const *>( &rhs ) )
    {
    }

    PhysicalDeviceDataGraphFeaturesARM & operator=( PhysicalDeviceDataGraphFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same reinterpretation as the converting constructor).
    PhysicalDeviceDataGraphFeaturesARM & operator=( VkPhysicalDeviceDataGraphFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDataGraphFeaturesARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphFeaturesARM & setDataGraph( Bool32 dataGraph_ ) VULKAN_HPP_NOEXCEPT
    {
      dataGraph = dataGraph_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphFeaturesARM & setDataGraphUpdateAfterBind( Bool32 dataGraphUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      dataGraphUpdateAfterBind = dataGraphUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphFeaturesARM &
      setDataGraphSpecializationConstants( Bool32 dataGraphSpecializationConstants_ ) VULKAN_HPP_NOEXCEPT
    {
      dataGraphSpecializationConstants = dataGraphSpecializationConstants_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphFeaturesARM & setDataGraphDescriptorBuffer( Bool32 dataGraphDescriptorBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      dataGraphDescriptorBuffer = dataGraphDescriptorBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphFeaturesARM & setDataGraphShaderModule( Bool32 dataGraphShaderModule_ ) VULKAN_HPP_NOEXCEPT
    {
      dataGraphShaderModule = dataGraphShaderModule_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer, for passing to the C API.
    operator VkPhysicalDeviceDataGraphFeaturesARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDataGraphFeaturesARM *>( this );
    }

    operator VkPhysicalDeviceDataGraphFeaturesARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDataGraphFeaturesARM *>( this );
    }

    operator VkPhysicalDeviceDataGraphFeaturesARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDataGraphFeaturesARM *>( this );
    }

    operator VkPhysicalDeviceDataGraphFeaturesARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDataGraphFeaturesARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of references (used for member-wise comparison below).
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, dataGraph, dataGraphUpdateAfterBind, dataGraphSpecializationConstants, dataGraphDescriptorBuffer, dataGraphShaderModule );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDataGraphFeaturesARM const & ) const = default;
#else
    bool operator==( PhysicalDeviceDataGraphFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataGraph == rhs.dataGraph ) &&
             ( dataGraphUpdateAfterBind == rhs.dataGraphUpdateAfterBind ) && ( dataGraphSpecializationConstants == rhs.dataGraphSpecializationConstants ) &&
             ( dataGraphDescriptorBuffer == rhs.dataGraphDescriptorBuffer ) && ( dataGraphShaderModule == rhs.dataGraphShaderModule );
#  endif
    }

    bool operator!=( PhysicalDeviceDataGraphFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members declared in the same order as the native C struct; the reinterpret_cast conversions above rely on this.
    StructureType sType                            = StructureType::ePhysicalDeviceDataGraphFeaturesARM;
    void *        pNext                            = {};
    Bool32        dataGraph                        = {};
    Bool32        dataGraphUpdateAfterBind         = {};
    Bool32        dataGraphSpecializationConstants = {};
    Bool32        dataGraphDescriptorBuffer        = {};
    Bool32        dataGraphShaderModule            = {};
  };

  // Map the native C type back to this C++ wrapper type.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceDataGraphFeaturesARM>
  {
    using Type = PhysicalDeviceDataGraphFeaturesARM;
  };
#endif

  // Map the StructureType enum value back to this C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDataGraphFeaturesARM>
  {
    using Type = PhysicalDeviceDataGraphFeaturesARM;
  };

  // wrapper struct for struct VkPhysicalDeviceDataGraphModelFeaturesQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDataGraphModelFeaturesQCOM.html
  struct PhysicalDeviceDataGraphModelFeaturesQCOM
  {
    using NativeType = VkPhysicalDeviceDataGraphModelFeaturesQCOM;

    // Chaining metadata: the sType this struct reports, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDataGraphModelFeaturesQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all features default to false (VK_FALSE) and the pNext chain defaults to empty.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDataGraphModelFeaturesQCOM( Bool32 dataGraphModel_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , dataGraphModel{ dataGraphModel_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDataGraphModelFeaturesQCOM( PhysicalDeviceDataGraphModelFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpreting it as this layout-compatible wrapper.
    PhysicalDeviceDataGraphModelFeaturesQCOM( VkPhysicalDeviceDataGraphModelFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDataGraphModelFeaturesQCOM( *reinterpret_cast<PhysicalDeviceDataGraphModelFeaturesQCOM const *>( &rhs ) )
    {
    }

    PhysicalDeviceDataGraphModelFeaturesQCOM & operator=( PhysicalDeviceDataGraphModelFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same reinterpretation as the converting constructor).
    PhysicalDeviceDataGraphModelFeaturesQCOM & operator=( VkPhysicalDeviceDataGraphModelFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDataGraphModelFeaturesQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphModelFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphModelFeaturesQCOM & setDataGraphModel( Bool32 dataGraphModel_ ) VULKAN_HPP_NOEXCEPT
    {
      dataGraphModel = dataGraphModel_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer, for passing to the C API.
    operator VkPhysicalDeviceDataGraphModelFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDataGraphModelFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceDataGraphModelFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDataGraphModelFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceDataGraphModelFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDataGraphModelFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceDataGraphModelFeaturesQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDataGraphModelFeaturesQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of references (used for member-wise comparison below).
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, dataGraphModel );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDataGraphModelFeaturesQCOM const & ) const = default;
#else
    bool operator==( PhysicalDeviceDataGraphModelFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataGraphModel == rhs.dataGraphModel );
#  endif
    }

    bool operator!=( PhysicalDeviceDataGraphModelFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members declared in the same order as the native C struct; the reinterpret_cast conversions above rely on this.
    StructureType sType          = StructureType::ePhysicalDeviceDataGraphModelFeaturesQCOM;
    void *        pNext          = {};
    Bool32        dataGraphModel = {};
  };

  // Map the native C type back to this C++ wrapper type.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceDataGraphModelFeaturesQCOM>
  {
    using Type = PhysicalDeviceDataGraphModelFeaturesQCOM;
  };
#endif

  // Map the StructureType enum value back to this C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDataGraphModelFeaturesQCOM>
  {
    using Type = PhysicalDeviceDataGraphModelFeaturesQCOM;
  };

  // wrapper struct for struct VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV.html
  struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV
  {
    using NativeType = VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;

    // Chaining metadata: the sType this struct reports, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all features default to false (VK_FALSE) and the pNext chain defaults to empty.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( Bool32 dedicatedAllocationImageAliasing_ = {},
                                                                                   void * pNext_                            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , dedicatedAllocationImageAliasing{ dedicatedAllocationImageAliasing_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpreting it as this layout-compatible wrapper.
    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( *reinterpret_cast<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &
      operator=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same reinterpretation as the converting constructor).
    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &
      operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &
      setDedicatedAllocationImageAliasing( Bool32 dedicatedAllocationImageAliasing_ ) VULKAN_HPP_NOEXCEPT
    {
      dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer, for passing to the C API.
    operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of references (used for member-wise comparison below).
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, dedicatedAllocationImageAliasing );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocationImageAliasing == rhs.dedicatedAllocationImageAliasing );
#  endif
    }

    bool operator!=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members declared in the same order as the native C struct; the reinterpret_cast conversions above rely on this.
    StructureType sType                            = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
    void *        pNext                            = {};
    Bool32        dedicatedAllocationImageAliasing = {};
  };

  // Map the native C type back to this C++ wrapper type.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>
  {
    using Type = PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
  };
#endif

  // Map the StructureType enum value back to this C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>
  {
    using Type = PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
  };

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  // wrapper struct for struct VkPhysicalDeviceDenseGeometryFormatFeaturesAMDX, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDenseGeometryFormatFeaturesAMDX.html
  struct PhysicalDeviceDenseGeometryFormatFeaturesAMDX
  {
    using NativeType = VkPhysicalDeviceDenseGeometryFormatFeaturesAMDX;

    // Chaining metadata: the sType this struct reports, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDenseGeometryFormatFeaturesAMDX;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all features default to false (VK_FALSE) and the pNext chain defaults to empty.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDenseGeometryFormatFeaturesAMDX( Bool32 denseGeometryFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , denseGeometryFormat{ denseGeometryFormat_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceDenseGeometryFormatFeaturesAMDX( PhysicalDeviceDenseGeometryFormatFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpreting it as this layout-compatible wrapper.
    PhysicalDeviceDenseGeometryFormatFeaturesAMDX( VkPhysicalDeviceDenseGeometryFormatFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDenseGeometryFormatFeaturesAMDX( *reinterpret_cast<PhysicalDeviceDenseGeometryFormatFeaturesAMDX const *>( &rhs ) )
    {
    }

    PhysicalDeviceDenseGeometryFormatFeaturesAMDX & operator=( PhysicalDeviceDenseGeometryFormatFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same reinterpretation as the converting constructor).
    PhysicalDeviceDenseGeometryFormatFeaturesAMDX & operator=( VkPhysicalDeviceDenseGeometryFormatFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDenseGeometryFormatFeaturesAMDX const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDenseGeometryFormatFeaturesAMDX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDenseGeometryFormatFeaturesAMDX & setDenseGeometryFormat( Bool32 denseGeometryFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      denseGeometryFormat = denseGeometryFormat_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer, for passing to the C API.
    operator VkPhysicalDeviceDenseGeometryFormatFeaturesAMDX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDenseGeometryFormatFeaturesAMDX *>( this );
    }

    operator VkPhysicalDeviceDenseGeometryFormatFeaturesAMDX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDenseGeometryFormatFeaturesAMDX *>( this );
    }

    operator VkPhysicalDeviceDenseGeometryFormatFeaturesAMDX const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDenseGeometryFormatFeaturesAMDX *>( this );
    }

    operator VkPhysicalDeviceDenseGeometryFormatFeaturesAMDX *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDenseGeometryFormatFeaturesAMDX *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of references (used for member-wise comparison below).
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, denseGeometryFormat );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDenseGeometryFormatFeaturesAMDX const & ) const = default;
#  else
    bool operator==( PhysicalDeviceDenseGeometryFormatFeaturesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( denseGeometryFormat == rhs.denseGeometryFormat );
#    endif
    }

    bool operator!=( PhysicalDeviceDenseGeometryFormatFeaturesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members declared in the same order as the native C struct; the reinterpret_cast conversions above rely on this.
    StructureType sType               = StructureType::ePhysicalDeviceDenseGeometryFormatFeaturesAMDX;
    void *        pNext               = {};
    Bool32        denseGeometryFormat = {};
  };

  // Map the native C type back to this C++ wrapper type.
#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceDenseGeometryFormatFeaturesAMDX>
  {
    using Type = PhysicalDeviceDenseGeometryFormatFeaturesAMDX;
  };
#  endif

  // Map the StructureType enum value back to this C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDenseGeometryFormatFeaturesAMDX>
  {
    using Type = PhysicalDeviceDenseGeometryFormatFeaturesAMDX;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  // wrapper struct for struct VkPhysicalDeviceDepthBiasControlFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDepthBiasControlFeaturesEXT.html
  struct PhysicalDeviceDepthBiasControlFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceDepthBiasControlFeaturesEXT;

    // Chaining metadata: the sType this struct reports, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDepthBiasControlFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all features default to false (VK_FALSE) and the pNext chain defaults to empty.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthBiasControlFeaturesEXT( Bool32 depthBiasControl_                                = {},
                                                                    Bool32 leastRepresentableValueForceUnormRepresentation_ = {},
                                                                    Bool32 floatRepresentation_                             = {},
                                                                    Bool32 depthBiasExact_                                  = {},
                                                                    void * pNext_                                           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , depthBiasControl{ depthBiasControl_ }
      , leastRepresentableValueForceUnormRepresentation{ leastRepresentableValueForceUnormRepresentation_ }
      , floatRepresentation{ floatRepresentation_ }
      , depthBiasExact{ depthBiasExact_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthBiasControlFeaturesEXT( PhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpreting it as this layout-compatible wrapper.
    PhysicalDeviceDepthBiasControlFeaturesEXT( VkPhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDepthBiasControlFeaturesEXT( *reinterpret_cast<PhysicalDeviceDepthBiasControlFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceDepthBiasControlFeaturesEXT & operator=( PhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same reinterpretation as the converting constructor).
    PhysicalDeviceDepthBiasControlFeaturesEXT & operator=( VkPhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDepthBiasControlFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & setDepthBiasControl( Bool32 depthBiasControl_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBiasControl = depthBiasControl_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT &
      setLeastRepresentableValueForceUnormRepresentation( Bool32 leastRepresentableValueForceUnormRepresentation_ ) VULKAN_HPP_NOEXCEPT
    {
      leastRepresentableValueForceUnormRepresentation = leastRepresentableValueForceUnormRepresentation_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & setFloatRepresentation( Bool32 floatRepresentation_ ) VULKAN_HPP_NOEXCEPT
    {
      floatRepresentation = floatRepresentation_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & setDepthBiasExact( Bool32 depthBiasExact_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBiasExact = depthBiasExact_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer, for passing to the C API.
    operator VkPhysicalDeviceDepthBiasControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDepthBiasControlFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDepthBiasControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDepthBiasControlFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDepthBiasControlFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDepthBiasControlFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDepthBiasControlFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDepthBiasControlFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of references (used for member-wise comparison below).
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, depthBiasControl, leastRepresentableValueForceUnormRepresentation, floatRepresentation, depthBiasExact );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDepthBiasControlFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthBiasControl == rhs.depthBiasControl ) &&
             ( leastRepresentableValueForceUnormRepresentation == rhs.leastRepresentableValueForceUnormRepresentation ) &&
             ( floatRepresentation == rhs.floatRepresentation ) && ( depthBiasExact == rhs.depthBiasExact );
#  endif
    }

    bool operator!=( PhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members declared in the same order as the native C struct; the reinterpret_cast conversions above rely on this.
    StructureType sType                                           = StructureType::ePhysicalDeviceDepthBiasControlFeaturesEXT;
    void *        pNext                                           = {};
    Bool32        depthBiasControl                                = {};
    Bool32        leastRepresentableValueForceUnormRepresentation = {};
    Bool32        floatRepresentation                             = {};
    Bool32        depthBiasExact                                  = {};
  };

  // Map the native C type back to this C++ wrapper type.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceDepthBiasControlFeaturesEXT>
  {
    using Type = PhysicalDeviceDepthBiasControlFeaturesEXT;
  };
#endif

  // Map the StructureType enum value back to this C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDepthBiasControlFeaturesEXT>
  {
    using Type = PhysicalDeviceDepthBiasControlFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceDepthClampControlFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDepthClampControlFeaturesEXT.html
  struct PhysicalDeviceDepthClampControlFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceDepthClampControlFeaturesEXT;

    // pNext-chain metadata: at most one instance of this struct may appear in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDepthClampControlFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampControlFeaturesEXT( Bool32 depthClampControl_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , depthClampControl{ depthClampControl_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampControlFeaturesEXT( PhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; this wrapper mirrors the C struct's member layout, so casting the reference is safe.
    PhysicalDeviceDepthClampControlFeaturesEXT( VkPhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDepthClampControlFeaturesEXT( *reinterpret_cast<PhysicalDeviceDepthClampControlFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceDepthClampControlFeaturesEXT & operator=( PhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility argument as the converting constructor).
    PhysicalDeviceDepthClampControlFeaturesEXT & operator=( VkPhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDepthClampControlFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable member setters (fluent interface).
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampControlFeaturesEXT & setDepthClampControl( Bool32 depthClampControl_ ) VULKAN_HPP_NOEXCEPT
    {
      depthClampControl = depthClampControl_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer (const and non-const).
    operator VkPhysicalDeviceDepthClampControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDepthClampControlFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDepthClampControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDepthClampControlFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDepthClampControlFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDepthClampControlFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDepthClampControlFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDepthClampControlFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Ties all members into a tuple of references; used by the generic operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, depthClampControl );
    }
#endif

    // Member-wise equality: defaulted <=> where supported, otherwise hand-written == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDepthClampControlFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClampControl == rhs.depthClampControl );
#  endif
    }

    bool operator!=( PhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType identifies this struct when it is linked into a pNext chain; keep the default value.
    StructureType sType             = StructureType::ePhysicalDeviceDepthClampControlFeaturesEXT;
    void *        pNext             = {};
    Bool32        depthClampControl = {};
  };

  // Map the native C type back to this C++ wrapper (used by generic code, C++20 and up).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceDepthClampControlFeaturesEXT>
  {
    using Type = PhysicalDeviceDepthClampControlFeaturesEXT;
  };
#endif

  // Map the StructureType enumerant back to this C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDepthClampControlFeaturesEXT>
  {
    using Type = PhysicalDeviceDepthClampControlFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceDepthClampZeroOneFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDepthClampZeroOneFeaturesKHR.html
  struct PhysicalDeviceDepthClampZeroOneFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceDepthClampZeroOneFeaturesKHR;

    // pNext-chain metadata: at most one instance of this struct may appear in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampZeroOneFeaturesKHR( Bool32 depthClampZeroOne_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , depthClampZeroOne{ depthClampZeroOne_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampZeroOneFeaturesKHR( PhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; this wrapper mirrors the C struct's member layout, so casting the reference is safe.
    PhysicalDeviceDepthClampZeroOneFeaturesKHR( VkPhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDepthClampZeroOneFeaturesKHR( *reinterpret_cast<PhysicalDeviceDepthClampZeroOneFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceDepthClampZeroOneFeaturesKHR & operator=( PhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility argument as the converting constructor).
    PhysicalDeviceDepthClampZeroOneFeaturesKHR & operator=( VkPhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDepthClampZeroOneFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable member setters (fluent interface).
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampZeroOneFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampZeroOneFeaturesKHR & setDepthClampZeroOne( Bool32 depthClampZeroOne_ ) VULKAN_HPP_NOEXCEPT
    {
      depthClampZeroOne = depthClampZeroOne_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer (const and non-const).
    operator VkPhysicalDeviceDepthClampZeroOneFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDepthClampZeroOneFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceDepthClampZeroOneFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDepthClampZeroOneFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceDepthClampZeroOneFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDepthClampZeroOneFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceDepthClampZeroOneFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDepthClampZeroOneFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Ties all members into a tuple of references; used by the generic operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, depthClampZeroOne );
    }
#endif

    // Member-wise equality: defaulted <=> where supported, otherwise hand-written == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDepthClampZeroOneFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClampZeroOne == rhs.depthClampZeroOne );
#  endif
    }

    bool operator!=( PhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType identifies this struct when it is linked into a pNext chain; keep the default value.
    StructureType sType             = StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesKHR;
    void *        pNext             = {};
    Bool32        depthClampZeroOne = {};
  };

  // Map the native C type back to this C++ wrapper (used by generic code, C++20 and up).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceDepthClampZeroOneFeaturesKHR>
  {
    using Type = PhysicalDeviceDepthClampZeroOneFeaturesKHR;
  };
#endif

  // Map the StructureType enumerant back to this C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesKHR>
  {
    using Type = PhysicalDeviceDepthClampZeroOneFeaturesKHR;
  };

  // Backward-compatible alias for the older EXT-suffixed name.
  using PhysicalDeviceDepthClampZeroOneFeaturesEXT = PhysicalDeviceDepthClampZeroOneFeaturesKHR;

  // wrapper struct for struct VkPhysicalDeviceDepthClipControlFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDepthClipControlFeaturesEXT.html
  struct PhysicalDeviceDepthClipControlFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceDepthClipControlFeaturesEXT;

    // pNext-chain metadata: at most one instance of this struct may appear in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT( Bool32 depthClipControl_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , depthClipControl{ depthClipControl_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; this wrapper mirrors the C struct's member layout, so casting the reference is safe.
    PhysicalDeviceDepthClipControlFeaturesEXT( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDepthClipControlFeaturesEXT( *reinterpret_cast<PhysicalDeviceDepthClipControlFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceDepthClipControlFeaturesEXT & operator=( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility argument as the converting constructor).
    PhysicalDeviceDepthClipControlFeaturesEXT & operator=( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDepthClipControlFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable member setters (fluent interface).
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipControlFeaturesEXT & setDepthClipControl( Bool32 depthClipControl_ ) VULKAN_HPP_NOEXCEPT
    {
      depthClipControl = depthClipControl_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer (const and non-const).
    operator VkPhysicalDeviceDepthClipControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDepthClipControlFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDepthClipControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDepthClipControlFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDepthClipControlFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDepthClipControlFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDepthClipControlFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDepthClipControlFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Ties all members into a tuple of references; used by the generic operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, depthClipControl );
    }
#endif

    // Member-wise equality: defaulted <=> where supported, otherwise hand-written == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDepthClipControlFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClipControl == rhs.depthClipControl );
#  endif
    }

    bool operator!=( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType identifies this struct when it is linked into a pNext chain; keep the default value.
    StructureType sType            = StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT;
    void *        pNext            = {};
    Bool32        depthClipControl = {};
  };

  // Map the native C type back to this C++ wrapper (used by generic code, C++20 and up).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceDepthClipControlFeaturesEXT>
  {
    using Type = PhysicalDeviceDepthClipControlFeaturesEXT;
  };
#endif

  // Map the StructureType enumerant back to this C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT>
  {
    using Type = PhysicalDeviceDepthClipControlFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceDepthClipEnableFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDepthClipEnableFeaturesEXT.html
  struct PhysicalDeviceDepthClipEnableFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceDepthClipEnableFeaturesEXT;

    // pNext-chain metadata: at most one instance of this struct may appear in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( Bool32 depthClipEnable_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , depthClipEnable{ depthClipEnable_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; this wrapper mirrors the C struct's member layout, so casting the reference is safe.
    PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDepthClipEnableFeaturesEXT( *reinterpret_cast<PhysicalDeviceDepthClipEnableFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility argument as the converting constructor).
    PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDepthClipEnableFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable member setters (fluent interface).
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & setDepthClipEnable( Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthClipEnable = depthClipEnable_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer (const and non-const).
    operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDepthClipEnableFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDepthClipEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDepthClipEnableFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDepthClipEnableFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Ties all members into a tuple of references; used by the generic operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, depthClipEnable );
    }
#endif

    // Member-wise equality: defaulted <=> where supported, otherwise hand-written == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDepthClipEnableFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClipEnable == rhs.depthClipEnable );
#  endif
    }

    bool operator!=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType identifies this struct when it is linked into a pNext chain; keep the default value.
    StructureType sType           = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;
    void *        pNext           = {};
    Bool32        depthClipEnable = {};
  };

  // Map the native C type back to this C++ wrapper (used by generic code, C++20 and up).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceDepthClipEnableFeaturesEXT>
  {
    using Type = PhysicalDeviceDepthClipEnableFeaturesEXT;
  };
#endif

  // Map the StructureType enumerant back to this C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT>
  {
    using Type = PhysicalDeviceDepthClipEnableFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceDepthStencilResolveProperties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDepthStencilResolveProperties.html
  struct PhysicalDeviceDepthStencilResolveProperties
  {
    using NativeType = VkPhysicalDeviceDepthStencilResolveProperties;

    // pNext-chain metadata: at most one instance of this struct may appear in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDepthStencilResolveProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( ResolveModeFlags supportedDepthResolveModes_   = {},
                                                                      ResolveModeFlags supportedStencilResolveModes_ = {},
                                                                      Bool32           independentResolveNone_       = {},
                                                                      Bool32           independentResolve_           = {},
                                                                      void *           pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , supportedDepthResolveModes{ supportedDepthResolveModes_ }
      , supportedStencilResolveModes{ supportedStencilResolveModes_ }
      , independentResolveNone{ independentResolveNone_ }
      , independentResolve{ independentResolve_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; this wrapper mirrors the C struct's member layout, so casting the reference is safe.
    PhysicalDeviceDepthStencilResolveProperties( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDepthStencilResolveProperties( *reinterpret_cast<PhysicalDeviceDepthStencilResolveProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceDepthStencilResolveProperties & operator=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility argument as the converting constructor).
    // Note: no member setters follow — unlike the feature structs above, none are generated for this struct.
    PhysicalDeviceDepthStencilResolveProperties & operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDepthStencilResolveProperties const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C struct, by reference and by pointer (const and non-const).
    operator VkPhysicalDeviceDepthStencilResolveProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDepthStencilResolveProperties *>( this );
    }

    operator VkPhysicalDeviceDepthStencilResolveProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDepthStencilResolveProperties *>( this );
    }

    operator VkPhysicalDeviceDepthStencilResolveProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDepthStencilResolveProperties *>( this );
    }

    operator VkPhysicalDeviceDepthStencilResolveProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDepthStencilResolveProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Ties all members into a tuple of references; used by the generic operator== below.
    std::tuple<StructureType const &, void * const &, ResolveModeFlags const &, ResolveModeFlags const &, Bool32 const &, Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, supportedDepthResolveModes, supportedStencilResolveModes, independentResolveNone, independentResolve );
    }
#endif

    // Member-wise equality: defaulted <=> where supported, otherwise hand-written == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDepthStencilResolveProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) &&
             ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) && ( independentResolveNone == rhs.independentResolveNone ) &&
             ( independentResolve == rhs.independentResolve );
#  endif
    }

    bool operator!=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType identifies this struct when it is linked into a pNext chain; keep the default value.
    StructureType    sType                        = StructureType::ePhysicalDeviceDepthStencilResolveProperties;
    void *           pNext                        = {};
    ResolveModeFlags supportedDepthResolveModes   = {};
    ResolveModeFlags supportedStencilResolveModes = {};
    Bool32           independentResolveNone       = {};
    Bool32           independentResolve           = {};
  };

  // Map the native C type back to this C++ wrapper (used by generic code, C++20 and up).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceDepthStencilResolveProperties>
  {
    using Type = PhysicalDeviceDepthStencilResolveProperties;
  };
#endif

  // Map the StructureType enumerant back to this C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDepthStencilResolveProperties>
  {
    using Type = PhysicalDeviceDepthStencilResolveProperties;
  };

  // Backward-compatible alias for the older KHR-suffixed name.
  using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties;

  // wrapper struct for struct VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT.html
  struct PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT;

    // pNext-chain metadata: at most one instance of this struct may appear in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( size_t combinedImageSamplerDensityMapDescriptorSize_ = {},
                                                                                void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , combinedImageSamplerDensityMapDescriptorSize{ combinedImageSamplerDensityMapDescriptorSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; this wrapper mirrors the C struct's member layout, so casting the reference is safe.
    PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( *reinterpret_cast<PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT &
      operator=( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility argument as the converting constructor).
    // Note: no member setters follow — unlike the feature structs above, none are generated for this struct.
    PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT & operator=( VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C struct, by reference and by pointer (const and non-const).
    operator VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Ties all members into a tuple of references; used by the generic operator== below.
    std::tuple<StructureType const &, void * const &, size_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, combinedImageSamplerDensityMapDescriptorSize );
    }
#endif

    // Member-wise equality: defaulted <=> where supported, otherwise hand-written == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( combinedImageSamplerDensityMapDescriptorSize == rhs.combinedImageSamplerDensityMapDescriptorSize );
#  endif
    }

    bool operator!=( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType identifies this struct when it is linked into a pNext chain; keep the default value.
    StructureType sType                                        = StructureType::ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT;
    void *        pNext                                        = {};
    size_t        combinedImageSamplerDensityMapDescriptorSize = {};
  };

  // Map the native C type back to this C++ wrapper (used by generic code, C++20 and up).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT>
  {
    using Type = PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT;
  };
#endif

  // Map the StructureType enumerant back to this C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT>
  {
    using Type = PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceDescriptorBufferFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorBufferFeaturesEXT.html
  struct PhysicalDeviceDescriptorBufferFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceDescriptorBufferFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDescriptorBufferFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferFeaturesEXT( Bool32 descriptorBuffer_                   = {},
                                                                    Bool32 descriptorBufferCaptureReplay_      = {},
                                                                    Bool32 descriptorBufferImageLayoutIgnored_ = {},
                                                                    Bool32 descriptorBufferPushDescriptors_    = {},
                                                                    void * pNext_                              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , descriptorBuffer{ descriptorBuffer_ }
      , descriptorBufferCaptureReplay{ descriptorBufferCaptureReplay_ }
      , descriptorBufferImageLayoutIgnored{ descriptorBufferImageLayoutIgnored_ }
      , descriptorBufferPushDescriptors{ descriptorBufferPushDescriptors_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferFeaturesEXT( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDescriptorBufferFeaturesEXT( VkPhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDescriptorBufferFeaturesEXT( *reinterpret_cast<PhysicalDeviceDescriptorBufferFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceDescriptorBufferFeaturesEXT & operator=( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceDescriptorBufferFeaturesEXT & operator=( VkPhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDescriptorBufferFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & setDescriptorBuffer( Bool32 descriptorBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBuffer = descriptorBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT &
      setDescriptorBufferCaptureReplay( Bool32 descriptorBufferCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBufferCaptureReplay = descriptorBufferCaptureReplay_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT &
      setDescriptorBufferImageLayoutIgnored( Bool32 descriptorBufferImageLayoutIgnored_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBufferImageLayoutIgnored = descriptorBufferImageLayoutIgnored_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT &
      setDescriptorBufferPushDescriptors( Bool32 descriptorBufferPushDescriptors_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBufferPushDescriptors = descriptorBufferPushDescriptors_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPhysicalDeviceDescriptorBufferFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDescriptorBufferFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDescriptorBufferFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDescriptorBufferFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDescriptorBufferFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDescriptorBufferFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDescriptorBufferFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDescriptorBufferFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, descriptorBuffer, descriptorBufferCaptureReplay, descriptorBufferImageLayoutIgnored, descriptorBufferPushDescriptors );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDescriptorBufferFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorBuffer == rhs.descriptorBuffer ) &&
             ( descriptorBufferCaptureReplay == rhs.descriptorBufferCaptureReplay ) &&
             ( descriptorBufferImageLayoutIgnored == rhs.descriptorBufferImageLayoutIgnored ) &&
             ( descriptorBufferPushDescriptors == rhs.descriptorBufferPushDescriptors );
#  endif
    }

    bool operator!=( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                              = StructureType::ePhysicalDeviceDescriptorBufferFeaturesEXT;
    void *        pNext                              = {};
    Bool32        descriptorBuffer                   = {};
    Bool32        descriptorBufferCaptureReplay      = {};
    Bool32        descriptorBufferImageLayoutIgnored = {};
    Bool32        descriptorBufferPushDescriptors    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (for generic/template code).
  template <>
  struct CppType<VkPhysicalDeviceDescriptorBufferFeaturesEXT>
  {
    using Type = PhysicalDeviceDescriptorBufferFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorBufferFeaturesEXT>
  {
    using Type = PhysicalDeviceDescriptorBufferFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceDescriptorBufferPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorBufferPropertiesEXT.html
  struct PhysicalDeviceDescriptorBufferPropertiesEXT
  {
    // The equivalent C struct; this wrapper is layout-compatible with it and is
    // converted via reinterpret_cast, so member order and types must mirror the
    // native layout exactly.
    using NativeType = VkPhysicalDeviceDescriptorBufferPropertiesEXT;

    // allowDuplicate == false: at most one instance of this struct in a pNext
    // chain (consumed by the StructureChain machinery elsewhere in vulkan.hpp).
    static const bool                                  allowDuplicate = false;
    // The sType value identifying this struct when placed in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDescriptorBufferPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: one parameter per member (in member declaration order),
    // with pNext last, defaulting to nullptr. sType is fixed by its initializer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferPropertiesEXT( Bool32     combinedImageSamplerDescriptorSingleArray_            = {},
                                                                      Bool32     bufferlessPushDescriptors_                            = {},
                                                                      Bool32     allowSamplerImageViewPostSubmitCreation_              = {},
                                                                      DeviceSize descriptorBufferOffsetAlignment_                      = {},
                                                                      uint32_t   maxDescriptorBufferBindings_                          = {},
                                                                      uint32_t   maxResourceDescriptorBufferBindings_                  = {},
                                                                      uint32_t   maxSamplerDescriptorBufferBindings_                   = {},
                                                                      uint32_t   maxEmbeddedImmutableSamplerBindings_                  = {},
                                                                      uint32_t   maxEmbeddedImmutableSamplers_                         = {},
                                                                      size_t     bufferCaptureReplayDescriptorDataSize_                = {},
                                                                      size_t     imageCaptureReplayDescriptorDataSize_                 = {},
                                                                      size_t     imageViewCaptureReplayDescriptorDataSize_             = {},
                                                                      size_t     samplerCaptureReplayDescriptorDataSize_               = {},
                                                                      size_t     accelerationStructureCaptureReplayDescriptorDataSize_ = {},
                                                                      size_t     samplerDescriptorSize_                                = {},
                                                                      size_t     combinedImageSamplerDescriptorSize_                   = {},
                                                                      size_t     sampledImageDescriptorSize_                           = {},
                                                                      size_t     storageImageDescriptorSize_                           = {},
                                                                      size_t     uniformTexelBufferDescriptorSize_                     = {},
                                                                      size_t     robustUniformTexelBufferDescriptorSize_               = {},
                                                                      size_t     storageTexelBufferDescriptorSize_                     = {},
                                                                      size_t     robustStorageTexelBufferDescriptorSize_               = {},
                                                                      size_t     uniformBufferDescriptorSize_                          = {},
                                                                      size_t     robustUniformBufferDescriptorSize_                    = {},
                                                                      size_t     storageBufferDescriptorSize_                          = {},
                                                                      size_t     robustStorageBufferDescriptorSize_                    = {},
                                                                      size_t     inputAttachmentDescriptorSize_                        = {},
                                                                      size_t     accelerationStructureDescriptorSize_                  = {},
                                                                      DeviceSize maxSamplerDescriptorBufferRange_                      = {},
                                                                      DeviceSize maxResourceDescriptorBufferRange_                     = {},
                                                                      DeviceSize samplerDescriptorBufferAddressSpaceSize_              = {},
                                                                      DeviceSize resourceDescriptorBufferAddressSpaceSize_             = {},
                                                                      DeviceSize descriptorBufferAddressSpaceSize_                     = {},
                                                                      void *     pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , combinedImageSamplerDescriptorSingleArray{ combinedImageSamplerDescriptorSingleArray_ }
      , bufferlessPushDescriptors{ bufferlessPushDescriptors_ }
      , allowSamplerImageViewPostSubmitCreation{ allowSamplerImageViewPostSubmitCreation_ }
      , descriptorBufferOffsetAlignment{ descriptorBufferOffsetAlignment_ }
      , maxDescriptorBufferBindings{ maxDescriptorBufferBindings_ }
      , maxResourceDescriptorBufferBindings{ maxResourceDescriptorBufferBindings_ }
      , maxSamplerDescriptorBufferBindings{ maxSamplerDescriptorBufferBindings_ }
      , maxEmbeddedImmutableSamplerBindings{ maxEmbeddedImmutableSamplerBindings_ }
      , maxEmbeddedImmutableSamplers{ maxEmbeddedImmutableSamplers_ }
      , bufferCaptureReplayDescriptorDataSize{ bufferCaptureReplayDescriptorDataSize_ }
      , imageCaptureReplayDescriptorDataSize{ imageCaptureReplayDescriptorDataSize_ }
      , imageViewCaptureReplayDescriptorDataSize{ imageViewCaptureReplayDescriptorDataSize_ }
      , samplerCaptureReplayDescriptorDataSize{ samplerCaptureReplayDescriptorDataSize_ }
      , accelerationStructureCaptureReplayDescriptorDataSize{ accelerationStructureCaptureReplayDescriptorDataSize_ }
      , samplerDescriptorSize{ samplerDescriptorSize_ }
      , combinedImageSamplerDescriptorSize{ combinedImageSamplerDescriptorSize_ }
      , sampledImageDescriptorSize{ sampledImageDescriptorSize_ }
      , storageImageDescriptorSize{ storageImageDescriptorSize_ }
      , uniformTexelBufferDescriptorSize{ uniformTexelBufferDescriptorSize_ }
      , robustUniformTexelBufferDescriptorSize{ robustUniformTexelBufferDescriptorSize_ }
      , storageTexelBufferDescriptorSize{ storageTexelBufferDescriptorSize_ }
      , robustStorageTexelBufferDescriptorSize{ robustStorageTexelBufferDescriptorSize_ }
      , uniformBufferDescriptorSize{ uniformBufferDescriptorSize_ }
      , robustUniformBufferDescriptorSize{ robustUniformBufferDescriptorSize_ }
      , storageBufferDescriptorSize{ storageBufferDescriptorSize_ }
      , robustStorageBufferDescriptorSize{ robustStorageBufferDescriptorSize_ }
      , inputAttachmentDescriptorSize{ inputAttachmentDescriptorSize_ }
      , accelerationStructureDescriptorSize{ accelerationStructureDescriptorSize_ }
      , maxSamplerDescriptorBufferRange{ maxSamplerDescriptorBufferRange_ }
      , maxResourceDescriptorBufferRange{ maxResourceDescriptorBufferRange_ }
      , samplerDescriptorBufferAddressSpaceSize{ samplerDescriptorBufferAddressSpaceSize_ }
      , resourceDescriptorBufferAddressSpaceSize{ resourceDescriptorBufferAddressSpaceSize_ }
      , descriptorBufferAddressSpaceSize{ descriptorBufferAddressSpaceSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferPropertiesEXT( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; safe because of the guaranteed layout
    // compatibility (see NativeType above).
    PhysicalDeviceDescriptorBufferPropertiesEXT( VkPhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDescriptorBufferPropertiesEXT( *reinterpret_cast<PhysicalDeviceDescriptorBufferPropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceDescriptorBufferPropertiesEXT & operator=( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct, again via the layout-compatible cast.
    PhysicalDeviceDescriptorBufferPropertiesEXT & operator=( VkPhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDescriptorBufferPropertiesEXT const *>( &rhs );
      return *this;
    }

    // NOTE: no setters are generated for this struct — presumably it is a
    // returned-only properties struct filled in by the implementation; confirm
    // against the registry XML if setters seem to be missing.

    // Implicit conversions to const/non-const reference and pointer of the
    // native C struct, for passing this wrapper directly to the C API.
    operator VkPhysicalDeviceDescriptorBufferPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDescriptorBufferPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceDescriptorBufferPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDescriptorBufferPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceDescriptorBufferPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDescriptorBufferPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceDescriptorBufferPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDescriptorBufferPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to every member, in declaration order; the basis
    // for the reflection-based operator== below.
    std::tuple<StructureType const &,
               void * const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               DeviceSize const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               size_t const &,
               size_t const &,
               size_t const &,
               size_t const &,
               size_t const &,
               size_t const &,
               size_t const &,
               size_t const &,
               size_t const &,
               size_t const &,
               size_t const &,
               size_t const &,
               size_t const &,
               size_t const &,
               size_t const &,
               size_t const &,
               size_t const &,
               size_t const &,
               size_t const &,
               DeviceSize const &,
               DeviceSize const &,
               DeviceSize const &,
               DeviceSize const &,
               DeviceSize const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       combinedImageSamplerDescriptorSingleArray,
                       bufferlessPushDescriptors,
                       allowSamplerImageViewPostSubmitCreation,
                       descriptorBufferOffsetAlignment,
                       maxDescriptorBufferBindings,
                       maxResourceDescriptorBufferBindings,
                       maxSamplerDescriptorBufferBindings,
                       maxEmbeddedImmutableSamplerBindings,
                       maxEmbeddedImmutableSamplers,
                       bufferCaptureReplayDescriptorDataSize,
                       imageCaptureReplayDescriptorDataSize,
                       imageViewCaptureReplayDescriptorDataSize,
                       samplerCaptureReplayDescriptorDataSize,
                       accelerationStructureCaptureReplayDescriptorDataSize,
                       samplerDescriptorSize,
                       combinedImageSamplerDescriptorSize,
                       sampledImageDescriptorSize,
                       storageImageDescriptorSize,
                       uniformTexelBufferDescriptorSize,
                       robustUniformTexelBufferDescriptorSize,
                       storageTexelBufferDescriptorSize,
                       robustStorageTexelBufferDescriptorSize,
                       uniformBufferDescriptorSize,
                       robustUniformBufferDescriptorSize,
                       storageBufferDescriptorSize,
                       robustStorageBufferDescriptorSize,
                       inputAttachmentDescriptorSize,
                       accelerationStructureDescriptorSize,
                       maxSamplerDescriptorBufferRange,
                       maxResourceDescriptorBufferRange,
                       samplerDescriptorBufferAddressSpaceSize,
                       resourceDescriptorBufferAddressSpaceSize,
                       descriptorBufferAddressSpaceSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDescriptorBufferPropertiesEXT const & ) const = default;
#else
    // Memberwise equality. Note: pNext is compared as a raw pointer value, not by
    // the contents of any chained structures.
    bool operator==( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( combinedImageSamplerDescriptorSingleArray == rhs.combinedImageSamplerDescriptorSingleArray ) &&
             ( bufferlessPushDescriptors == rhs.bufferlessPushDescriptors ) &&
             ( allowSamplerImageViewPostSubmitCreation == rhs.allowSamplerImageViewPostSubmitCreation ) &&
             ( descriptorBufferOffsetAlignment == rhs.descriptorBufferOffsetAlignment ) && ( maxDescriptorBufferBindings == rhs.maxDescriptorBufferBindings ) &&
             ( maxResourceDescriptorBufferBindings == rhs.maxResourceDescriptorBufferBindings ) &&
             ( maxSamplerDescriptorBufferBindings == rhs.maxSamplerDescriptorBufferBindings ) &&
             ( maxEmbeddedImmutableSamplerBindings == rhs.maxEmbeddedImmutableSamplerBindings ) &&
             ( maxEmbeddedImmutableSamplers == rhs.maxEmbeddedImmutableSamplers ) &&
             ( bufferCaptureReplayDescriptorDataSize == rhs.bufferCaptureReplayDescriptorDataSize ) &&
             ( imageCaptureReplayDescriptorDataSize == rhs.imageCaptureReplayDescriptorDataSize ) &&
             ( imageViewCaptureReplayDescriptorDataSize == rhs.imageViewCaptureReplayDescriptorDataSize ) &&
             ( samplerCaptureReplayDescriptorDataSize == rhs.samplerCaptureReplayDescriptorDataSize ) &&
             ( accelerationStructureCaptureReplayDescriptorDataSize == rhs.accelerationStructureCaptureReplayDescriptorDataSize ) &&
             ( samplerDescriptorSize == rhs.samplerDescriptorSize ) && ( combinedImageSamplerDescriptorSize == rhs.combinedImageSamplerDescriptorSize ) &&
             ( sampledImageDescriptorSize == rhs.sampledImageDescriptorSize ) && ( storageImageDescriptorSize == rhs.storageImageDescriptorSize ) &&
             ( uniformTexelBufferDescriptorSize == rhs.uniformTexelBufferDescriptorSize ) &&
             ( robustUniformTexelBufferDescriptorSize == rhs.robustUniformTexelBufferDescriptorSize ) &&
             ( storageTexelBufferDescriptorSize == rhs.storageTexelBufferDescriptorSize ) &&
             ( robustStorageTexelBufferDescriptorSize == rhs.robustStorageTexelBufferDescriptorSize ) &&
             ( uniformBufferDescriptorSize == rhs.uniformBufferDescriptorSize ) &&
             ( robustUniformBufferDescriptorSize == rhs.robustUniformBufferDescriptorSize ) &&
             ( storageBufferDescriptorSize == rhs.storageBufferDescriptorSize ) &&
             ( robustStorageBufferDescriptorSize == rhs.robustStorageBufferDescriptorSize ) &&
             ( inputAttachmentDescriptorSize == rhs.inputAttachmentDescriptorSize ) &&
             ( accelerationStructureDescriptorSize == rhs.accelerationStructureDescriptorSize ) &&
             ( maxSamplerDescriptorBufferRange == rhs.maxSamplerDescriptorBufferRange ) &&
             ( maxResourceDescriptorBufferRange == rhs.maxResourceDescriptorBufferRange ) &&
             ( samplerDescriptorBufferAddressSpaceSize == rhs.samplerDescriptorBufferAddressSpaceSize ) &&
             ( resourceDescriptorBufferAddressSpaceSize == rhs.resourceDescriptorBufferAddressSpaceSize ) &&
             ( descriptorBufferAddressSpaceSize == rhs.descriptorBufferAddressSpaceSize );
#  endif
    }

    bool operator!=( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPhysicalDeviceDescriptorBufferPropertiesEXT
    // exactly; do not reorder (the reinterpret_cast conversions rely on it).
    StructureType sType                                                = StructureType::ePhysicalDeviceDescriptorBufferPropertiesEXT;
    void *        pNext                                                = {};
    Bool32        combinedImageSamplerDescriptorSingleArray            = {};
    Bool32        bufferlessPushDescriptors                            = {};
    Bool32        allowSamplerImageViewPostSubmitCreation              = {};
    DeviceSize    descriptorBufferOffsetAlignment                      = {};
    uint32_t      maxDescriptorBufferBindings                          = {};
    uint32_t      maxResourceDescriptorBufferBindings                  = {};
    uint32_t      maxSamplerDescriptorBufferBindings                   = {};
    uint32_t      maxEmbeddedImmutableSamplerBindings                  = {};
    uint32_t      maxEmbeddedImmutableSamplers                         = {};
    size_t        bufferCaptureReplayDescriptorDataSize                = {};
    size_t        imageCaptureReplayDescriptorDataSize                 = {};
    size_t        imageViewCaptureReplayDescriptorDataSize             = {};
    size_t        samplerCaptureReplayDescriptorDataSize               = {};
    size_t        accelerationStructureCaptureReplayDescriptorDataSize = {};
    size_t        samplerDescriptorSize                                = {};
    size_t        combinedImageSamplerDescriptorSize                   = {};
    size_t        sampledImageDescriptorSize                           = {};
    size_t        storageImageDescriptorSize                           = {};
    size_t        uniformTexelBufferDescriptorSize                     = {};
    size_t        robustUniformTexelBufferDescriptorSize               = {};
    size_t        storageTexelBufferDescriptorSize                     = {};
    size_t        robustStorageTexelBufferDescriptorSize               = {};
    size_t        uniformBufferDescriptorSize                          = {};
    size_t        robustUniformBufferDescriptorSize                    = {};
    size_t        storageBufferDescriptorSize                          = {};
    size_t        robustStorageBufferDescriptorSize                    = {};
    size_t        inputAttachmentDescriptorSize                        = {};
    size_t        accelerationStructureDescriptorSize                  = {};
    DeviceSize    maxSamplerDescriptorBufferRange                      = {};
    DeviceSize    maxResourceDescriptorBufferRange                     = {};
    DeviceSize    samplerDescriptorBufferAddressSpaceSize              = {};
    DeviceSize    resourceDescriptorBufferAddressSpaceSize             = {};
    DeviceSize    descriptorBufferAddressSpaceSize                     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (for generic/template code).
  template <>
  struct CppType<VkPhysicalDeviceDescriptorBufferPropertiesEXT>
  {
    using Type = PhysicalDeviceDescriptorBufferPropertiesEXT;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorBufferPropertiesEXT>
  {
    using Type = PhysicalDeviceDescriptorBufferPropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceDescriptorBufferTensorFeaturesARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorBufferTensorFeaturesARM.html
  struct PhysicalDeviceDescriptorBufferTensorFeaturesARM
  {
    // The equivalent C struct; this wrapper is layout-compatible with it and is
    // converted via reinterpret_cast, so member order and types must mirror the
    // native layout exactly.
    using NativeType = VkPhysicalDeviceDescriptorBufferTensorFeaturesARM;

    // allowDuplicate == false: at most one instance of this struct in a pNext
    // chain (consumed by the StructureChain machinery elsewhere in vulkan.hpp).
    static const bool                                  allowDuplicate = false;
    // The sType value identifying this struct when placed in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDescriptorBufferTensorFeaturesARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: one parameter per member, pNext last and defaulting to
    // nullptr. sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferTensorFeaturesARM( Bool32 descriptorBufferTensorDescriptors_ = {},
                                                                          void * pNext_                             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , descriptorBufferTensorDescriptors{ descriptorBufferTensorDescriptors_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceDescriptorBufferTensorFeaturesARM( PhysicalDeviceDescriptorBufferTensorFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; safe because of the guaranteed layout
    // compatibility (see NativeType above).
    PhysicalDeviceDescriptorBufferTensorFeaturesARM( VkPhysicalDeviceDescriptorBufferTensorFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDescriptorBufferTensorFeaturesARM( *reinterpret_cast<PhysicalDeviceDescriptorBufferTensorFeaturesARM const *>( &rhs ) )
    {
    }

    PhysicalDeviceDescriptorBufferTensorFeaturesARM & operator=( PhysicalDeviceDescriptorBufferTensorFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct, again via the layout-compatible cast.
    PhysicalDeviceDescriptorBufferTensorFeaturesARM & operator=( VkPhysicalDeviceDescriptorBufferTensorFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDescriptorBufferTensorFeaturesARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferTensorFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferTensorFeaturesARM &
      setDescriptorBufferTensorDescriptors( Bool32 descriptorBufferTensorDescriptors_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBufferTensorDescriptors = descriptorBufferTensorDescriptors_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to const/non-const reference and pointer of the
    // native C struct, for passing this wrapper directly to the C API.
    operator VkPhysicalDeviceDescriptorBufferTensorFeaturesARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDescriptorBufferTensorFeaturesARM *>( this );
    }

    operator VkPhysicalDeviceDescriptorBufferTensorFeaturesARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDescriptorBufferTensorFeaturesARM *>( this );
    }

    operator VkPhysicalDeviceDescriptorBufferTensorFeaturesARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDescriptorBufferTensorFeaturesARM *>( this );
    }

    operator VkPhysicalDeviceDescriptorBufferTensorFeaturesARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDescriptorBufferTensorFeaturesARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to every member, in declaration order; the basis
    // for the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, descriptorBufferTensorDescriptors );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDescriptorBufferTensorFeaturesARM const & ) const = default;
#else
    // Memberwise equality. Note: pNext is compared as a raw pointer value, not by
    // the contents of any chained structures.
    bool operator==( PhysicalDeviceDescriptorBufferTensorFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorBufferTensorDescriptors == rhs.descriptorBufferTensorDescriptors );
#  endif
    }

    bool operator!=( PhysicalDeviceDescriptorBufferTensorFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPhysicalDeviceDescriptorBufferTensorFeaturesARM
    // exactly; do not reorder (the reinterpret_cast conversions rely on it).
    StructureType sType                             = StructureType::ePhysicalDeviceDescriptorBufferTensorFeaturesARM;
    void *        pNext                             = {};
    Bool32        descriptorBufferTensorDescriptors = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (for generic/template code).
  template <>
  struct CppType<VkPhysicalDeviceDescriptorBufferTensorFeaturesARM>
  {
    using Type = PhysicalDeviceDescriptorBufferTensorFeaturesARM;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorBufferTensorFeaturesARM>
  {
    using Type = PhysicalDeviceDescriptorBufferTensorFeaturesARM;
  };

  // wrapper struct for struct VkPhysicalDeviceDescriptorBufferTensorPropertiesARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorBufferTensorPropertiesARM.html
  struct PhysicalDeviceDescriptorBufferTensorPropertiesARM
  {
    // The equivalent C struct; this wrapper is layout-compatible with it and is
    // converted via reinterpret_cast, so member order and types must mirror the
    // native layout exactly.
    using NativeType = VkPhysicalDeviceDescriptorBufferTensorPropertiesARM;

    // allowDuplicate == false: at most one instance of this struct in a pNext
    // chain (consumed by the StructureChain machinery elsewhere in vulkan.hpp).
    static const bool                                  allowDuplicate = false;
    // The sType value identifying this struct when placed in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDescriptorBufferTensorPropertiesARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: one parameter per member (in member declaration order),
    // with pNext last, defaulting to nullptr. sType is fixed by its initializer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferTensorPropertiesARM( size_t tensorCaptureReplayDescriptorDataSize_     = {},
                                                                            size_t tensorViewCaptureReplayDescriptorDataSize_ = {},
                                                                            size_t tensorDescriptorSize_                      = {},
                                                                            void * pNext_                                     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , tensorCaptureReplayDescriptorDataSize{ tensorCaptureReplayDescriptorDataSize_ }
      , tensorViewCaptureReplayDescriptorDataSize{ tensorViewCaptureReplayDescriptorDataSize_ }
      , tensorDescriptorSize{ tensorDescriptorSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceDescriptorBufferTensorPropertiesARM( PhysicalDeviceDescriptorBufferTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; safe because of the guaranteed layout
    // compatibility (see NativeType above).
    PhysicalDeviceDescriptorBufferTensorPropertiesARM( VkPhysicalDeviceDescriptorBufferTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDescriptorBufferTensorPropertiesARM( *reinterpret_cast<PhysicalDeviceDescriptorBufferTensorPropertiesARM const *>( &rhs ) )
    {
    }

    PhysicalDeviceDescriptorBufferTensorPropertiesARM &
      operator=( PhysicalDeviceDescriptorBufferTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct, again via the layout-compatible cast.
    PhysicalDeviceDescriptorBufferTensorPropertiesARM & operator=( VkPhysicalDeviceDescriptorBufferTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDescriptorBufferTensorPropertiesARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferTensorPropertiesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferTensorPropertiesARM &
      setTensorCaptureReplayDescriptorDataSize( size_t tensorCaptureReplayDescriptorDataSize_ ) VULKAN_HPP_NOEXCEPT
    {
      tensorCaptureReplayDescriptorDataSize = tensorCaptureReplayDescriptorDataSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferTensorPropertiesARM &
      setTensorViewCaptureReplayDescriptorDataSize( size_t tensorViewCaptureReplayDescriptorDataSize_ ) VULKAN_HPP_NOEXCEPT
    {
      tensorViewCaptureReplayDescriptorDataSize = tensorViewCaptureReplayDescriptorDataSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferTensorPropertiesARM & setTensorDescriptorSize( size_t tensorDescriptorSize_ ) VULKAN_HPP_NOEXCEPT
    {
      tensorDescriptorSize = tensorDescriptorSize_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to const/non-const reference and pointer of the
    // native C struct, for passing this wrapper directly to the C API.
    operator VkPhysicalDeviceDescriptorBufferTensorPropertiesARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDescriptorBufferTensorPropertiesARM *>( this );
    }

    operator VkPhysicalDeviceDescriptorBufferTensorPropertiesARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDescriptorBufferTensorPropertiesARM *>( this );
    }

    operator VkPhysicalDeviceDescriptorBufferTensorPropertiesARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDescriptorBufferTensorPropertiesARM *>( this );
    }

    operator VkPhysicalDeviceDescriptorBufferTensorPropertiesARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDescriptorBufferTensorPropertiesARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to every member, in declaration order; the basis
    // for the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, size_t const &, size_t const &, size_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, tensorCaptureReplayDescriptorDataSize, tensorViewCaptureReplayDescriptorDataSize, tensorDescriptorSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDescriptorBufferTensorPropertiesARM const & ) const = default;
#else
    // Memberwise equality. Note: pNext is compared as a raw pointer value, not by
    // the contents of any chained structures.
    bool operator==( PhysicalDeviceDescriptorBufferTensorPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tensorCaptureReplayDescriptorDataSize == rhs.tensorCaptureReplayDescriptorDataSize ) &&
             ( tensorViewCaptureReplayDescriptorDataSize == rhs.tensorViewCaptureReplayDescriptorDataSize ) &&
             ( tensorDescriptorSize == rhs.tensorDescriptorSize );
#  endif
    }

    bool operator!=( PhysicalDeviceDescriptorBufferTensorPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPhysicalDeviceDescriptorBufferTensorPropertiesARM
    // exactly; do not reorder (the reinterpret_cast conversions rely on it).
    StructureType sType                                     = StructureType::ePhysicalDeviceDescriptorBufferTensorPropertiesARM;
    void *        pNext                                     = {};
    size_t        tensorCaptureReplayDescriptorDataSize     = {};
    size_t        tensorViewCaptureReplayDescriptorDataSize = {};
    size_t        tensorDescriptorSize                      = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (for generic/template code).
  template <>
  struct CppType<VkPhysicalDeviceDescriptorBufferTensorPropertiesARM>
  {
    using Type = PhysicalDeviceDescriptorBufferTensorPropertiesARM;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorBufferTensorPropertiesARM>
  {
    using Type = PhysicalDeviceDescriptorBufferTensorPropertiesARM;
  };

  // wrapper struct for struct VkPhysicalDeviceDescriptorIndexingFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorIndexingFeatures.html
  //
  // Layout-compatible C++ wrapper: members mirror the C struct exactly (same order and
  // types), which is what makes the reinterpret_cast-based conversions below valid.
  struct PhysicalDeviceDescriptorIndexingFeatures
  {
    // The native C struct this wrapper is layout-compatible with.
    using NativeType = VkPhysicalDeviceDescriptorIndexingFeatures;

    // Whether this struct may appear more than once in a structure (pNext) chain.
    static const bool                                  allowDuplicate = false;
    // The sType enumerant that identifies this struct in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: every feature flag defaults to VK_FALSE ({}); sType is fixed by
    // the member initializer below and is intentionally not a parameter.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( Bool32 shaderInputAttachmentArrayDynamicIndexing_          = {},
                                                                   Bool32 shaderUniformTexelBufferArrayDynamicIndexing_       = {},
                                                                   Bool32 shaderStorageTexelBufferArrayDynamicIndexing_       = {},
                                                                   Bool32 shaderUniformBufferArrayNonUniformIndexing_         = {},
                                                                   Bool32 shaderSampledImageArrayNonUniformIndexing_          = {},
                                                                   Bool32 shaderStorageBufferArrayNonUniformIndexing_         = {},
                                                                   Bool32 shaderStorageImageArrayNonUniformIndexing_          = {},
                                                                   Bool32 shaderInputAttachmentArrayNonUniformIndexing_       = {},
                                                                   Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_    = {},
                                                                   Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_    = {},
                                                                   Bool32 descriptorBindingUniformBufferUpdateAfterBind_      = {},
                                                                   Bool32 descriptorBindingSampledImageUpdateAfterBind_       = {},
                                                                   Bool32 descriptorBindingStorageImageUpdateAfterBind_       = {},
                                                                   Bool32 descriptorBindingStorageBufferUpdateAfterBind_      = {},
                                                                   Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {},
                                                                   Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {},
                                                                   Bool32 descriptorBindingUpdateUnusedWhilePending_          = {},
                                                                   Bool32 descriptorBindingPartiallyBound_                    = {},
                                                                   Bool32 descriptorBindingVariableDescriptorCount_           = {},
                                                                   Bool32 runtimeDescriptorArray_                             = {},
                                                                   void * pNext_                                              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderInputAttachmentArrayDynamicIndexing{ shaderInputAttachmentArrayDynamicIndexing_ }
      , shaderUniformTexelBufferArrayDynamicIndexing{ shaderUniformTexelBufferArrayDynamicIndexing_ }
      , shaderStorageTexelBufferArrayDynamicIndexing{ shaderStorageTexelBufferArrayDynamicIndexing_ }
      , shaderUniformBufferArrayNonUniformIndexing{ shaderUniformBufferArrayNonUniformIndexing_ }
      , shaderSampledImageArrayNonUniformIndexing{ shaderSampledImageArrayNonUniformIndexing_ }
      , shaderStorageBufferArrayNonUniformIndexing{ shaderStorageBufferArrayNonUniformIndexing_ }
      , shaderStorageImageArrayNonUniformIndexing{ shaderStorageImageArrayNonUniformIndexing_ }
      , shaderInputAttachmentArrayNonUniformIndexing{ shaderInputAttachmentArrayNonUniformIndexing_ }
      , shaderUniformTexelBufferArrayNonUniformIndexing{ shaderUniformTexelBufferArrayNonUniformIndexing_ }
      , shaderStorageTexelBufferArrayNonUniformIndexing{ shaderStorageTexelBufferArrayNonUniformIndexing_ }
      , descriptorBindingUniformBufferUpdateAfterBind{ descriptorBindingUniformBufferUpdateAfterBind_ }
      , descriptorBindingSampledImageUpdateAfterBind{ descriptorBindingSampledImageUpdateAfterBind_ }
      , descriptorBindingStorageImageUpdateAfterBind{ descriptorBindingStorageImageUpdateAfterBind_ }
      , descriptorBindingStorageBufferUpdateAfterBind{ descriptorBindingStorageBufferUpdateAfterBind_ }
      , descriptorBindingUniformTexelBufferUpdateAfterBind{ descriptorBindingUniformTexelBufferUpdateAfterBind_ }
      , descriptorBindingStorageTexelBufferUpdateAfterBind{ descriptorBindingStorageTexelBufferUpdateAfterBind_ }
      , descriptorBindingUpdateUnusedWhilePending{ descriptorBindingUpdateUnusedWhilePending_ }
      , descriptorBindingPartiallyBound{ descriptorBindingPartiallyBound_ }
      , descriptorBindingVariableDescriptorCount{ descriptorBindingVariableDescriptorCount_ }
      , runtimeDescriptorArray{ runtimeDescriptorArray_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because both types share one layout.
    PhysicalDeviceDescriptorIndexingFeatures( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDescriptorIndexingFeatures( *reinterpret_cast<PhysicalDeviceDescriptorIndexingFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceDescriptorIndexingFeatures & operator=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; valid because both types share one layout.
    PhysicalDeviceDescriptorIndexingFeatures & operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDescriptorIndexingFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each one assigns a single member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
      setShaderInputAttachmentArrayDynamicIndexing( Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
      setShaderUniformTexelBufferArrayDynamicIndexing( Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
      setShaderStorageTexelBufferArrayDynamicIndexing( Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
      setShaderUniformBufferArrayNonUniformIndexing( Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
      setShaderSampledImageArrayNonUniformIndexing( Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
      setShaderStorageBufferArrayNonUniformIndexing( Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
      setShaderStorageImageArrayNonUniformIndexing( Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
      setShaderInputAttachmentArrayNonUniformIndexing( Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
      setShaderUniformTexelBufferArrayNonUniformIndexing( Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
      setShaderStorageTexelBufferArrayNonUniformIndexing( Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
      setDescriptorBindingUniformBufferUpdateAfterBind( Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
      setDescriptorBindingSampledImageUpdateAfterBind( Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
      setDescriptorBindingStorageImageUpdateAfterBind( Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
      setDescriptorBindingStorageBufferUpdateAfterBind( Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
      setDescriptorBindingUniformTexelBufferUpdateAfterBind( Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
      setDescriptorBindingStorageTexelBufferUpdateAfterBind( Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
      setDescriptorBindingUpdateUnusedWhilePending( Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
      setDescriptorBindingPartiallyBound( Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
      setDescriptorBindingVariableDescriptorCount( Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setRuntimeDescriptorArray( Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
    {
      runtimeDescriptorArray = runtimeDescriptorArray_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (reference and pointer, const and
    // non-const), so this wrapper can be passed directly to the C API.
    operator VkPhysicalDeviceDescriptorIndexingFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingFeatures *>( this );
    }

    operator VkPhysicalDeviceDescriptorIndexingFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeatures *>( this );
    }

    operator VkPhysicalDeviceDescriptorIndexingFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingFeatures *>( this );
    }

    operator VkPhysicalDeviceDescriptorIndexingFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to every member (in declaration order);
    // used below to implement comparison via std::tuple's operators.
    std::tuple<StructureType const &,
               void * const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       shaderInputAttachmentArrayDynamicIndexing,
                       shaderUniformTexelBufferArrayDynamicIndexing,
                       shaderStorageTexelBufferArrayDynamicIndexing,
                       shaderUniformBufferArrayNonUniformIndexing,
                       shaderSampledImageArrayNonUniformIndexing,
                       shaderStorageBufferArrayNonUniformIndexing,
                       shaderStorageImageArrayNonUniformIndexing,
                       shaderInputAttachmentArrayNonUniformIndexing,
                       shaderUniformTexelBufferArrayNonUniformIndexing,
                       shaderStorageTexelBufferArrayNonUniformIndexing,
                       descriptorBindingUniformBufferUpdateAfterBind,
                       descriptorBindingSampledImageUpdateAfterBind,
                       descriptorBindingStorageImageUpdateAfterBind,
                       descriptorBindingStorageBufferUpdateAfterBind,
                       descriptorBindingUniformTexelBufferUpdateAfterBind,
                       descriptorBindingStorageTexelBufferUpdateAfterBind,
                       descriptorBindingUpdateUnusedWhilePending,
                       descriptorBindingPartiallyBound,
                       descriptorBindingVariableDescriptorCount,
                       runtimeDescriptorArray );
    }
#endif

    // Member-wise comparison: defaulted <=> where available, otherwise hand-written
    // equality (note: pNext is compared as a raw pointer, not deeply).
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDescriptorIndexingFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) &&
             ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) &&
             ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) &&
             ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) &&
             ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) &&
             ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) &&
             ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) &&
             ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) &&
             ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) &&
             ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) &&
             ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) &&
             ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) &&
             ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) &&
             ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) &&
             ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) &&
             ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) &&
             ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) &&
             ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) &&
             ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) &&
             ( runtimeDescriptorArray == rhs.runtimeDescriptorArray );
#  endif
    }

    bool operator!=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceDescriptorIndexingFeatures one-to-one; do not
    // reorder — the reinterpret_cast conversions above depend on identical layout.
    StructureType sType                                              = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;
    void *        pNext                                              = {};
    Bool32        shaderInputAttachmentArrayDynamicIndexing          = {};
    Bool32        shaderUniformTexelBufferArrayDynamicIndexing       = {};
    Bool32        shaderStorageTexelBufferArrayDynamicIndexing       = {};
    Bool32        shaderUniformBufferArrayNonUniformIndexing         = {};
    Bool32        shaderSampledImageArrayNonUniformIndexing          = {};
    Bool32        shaderStorageBufferArrayNonUniformIndexing         = {};
    Bool32        shaderStorageImageArrayNonUniformIndexing          = {};
    Bool32        shaderInputAttachmentArrayNonUniformIndexing       = {};
    Bool32        shaderUniformTexelBufferArrayNonUniformIndexing    = {};
    Bool32        shaderStorageTexelBufferArrayNonUniformIndexing    = {};
    Bool32        descriptorBindingUniformBufferUpdateAfterBind      = {};
    Bool32        descriptorBindingSampledImageUpdateAfterBind       = {};
    Bool32        descriptorBindingStorageImageUpdateAfterBind       = {};
    Bool32        descriptorBindingStorageBufferUpdateAfterBind      = {};
    Bool32        descriptorBindingUniformTexelBufferUpdateAfterBind = {};
    Bool32        descriptorBindingStorageTexelBufferUpdateAfterBind = {};
    Bool32        descriptorBindingUpdateUnusedWhilePending          = {};
    Bool32        descriptorBindingPartiallyBound                    = {};
    Bool32        descriptorBindingVariableDescriptorCount           = {};
    Bool32        runtimeDescriptorArray                             = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (C++20 builds only).
  template <>
  struct CppType<VkPhysicalDeviceDescriptorIndexingFeatures>
  {
    using Type = PhysicalDeviceDescriptorIndexingFeatures;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorIndexingFeatures>
  {
    using Type = PhysicalDeviceDescriptorIndexingFeatures;
  };

  // Backward-compatibility alias: code written against the extension-suffixed (EXT)
  // name resolves to the same struct as the unsuffixed one.
  using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures;

  // wrapper struct for struct VkPhysicalDeviceDescriptorIndexingProperties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorIndexingProperties.html
  struct PhysicalDeviceDescriptorIndexingProperties
  {
    using NativeType = VkPhysicalDeviceDescriptorIndexingProperties;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDescriptorIndexingProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( uint32_t maxUpdateAfterBindDescriptorsInAllPools_              = {},
                                                                     Bool32   shaderUniformBufferArrayNonUniformIndexingNative_     = {},
                                                                     Bool32   shaderSampledImageArrayNonUniformIndexingNative_      = {},
                                                                     Bool32   shaderStorageBufferArrayNonUniformIndexingNative_     = {},
                                                                     Bool32   shaderStorageImageArrayNonUniformIndexingNative_      = {},
                                                                     Bool32   shaderInputAttachmentArrayNonUniformIndexingNative_   = {},
                                                                     Bool32   robustBufferAccessUpdateAfterBind_                    = {},
                                                                     Bool32   quadDivergentImplicitLod_                             = {},
                                                                     uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_         = {},
                                                                     uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_   = {},
                                                                     uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_   = {},
                                                                     uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_    = {},
                                                                     uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_    = {},
                                                                     uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {},
                                                                     uint32_t maxPerStageUpdateAfterBindResources_                  = {},
                                                                     uint32_t maxDescriptorSetUpdateAfterBindSamplers_              = {},
                                                                     uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_        = {},
                                                                     uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {},
                                                                     uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_        = {},
                                                                     uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {},
                                                                     uint32_t maxDescriptorSetUpdateAfterBindSampledImages_         = {},
                                                                     uint32_t maxDescriptorSetUpdateAfterBindStorageImages_         = {},
                                                                     uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_      = {},
                                                                     void *   pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxUpdateAfterBindDescriptorsInAllPools{ maxUpdateAfterBindDescriptorsInAllPools_ }
      , shaderUniformBufferArrayNonUniformIndexingNative{ shaderUniformBufferArrayNonUniformIndexingNative_ }
      , shaderSampledImageArrayNonUniformIndexingNative{ shaderSampledImageArrayNonUniformIndexingNative_ }
      , shaderStorageBufferArrayNonUniformIndexingNative{ shaderStorageBufferArrayNonUniformIndexingNative_ }
      , shaderStorageImageArrayNonUniformIndexingNative{ shaderStorageImageArrayNonUniformIndexingNative_ }
      , shaderInputAttachmentArrayNonUniformIndexingNative{ shaderInputAttachmentArrayNonUniformIndexingNative_ }
      , robustBufferAccessUpdateAfterBind{ robustBufferAccessUpdateAfterBind_ }
      , quadDivergentImplicitLod{ quadDivergentImplicitLod_ }
      , maxPerStageDescriptorUpdateAfterBindSamplers{ maxPerStageDescriptorUpdateAfterBindSamplers_ }
      , maxPerStageDescriptorUpdateAfterBindUniformBuffers{ maxPerStageDescriptorUpdateAfterBindUniformBuffers_ }
      , maxPerStageDescriptorUpdateAfterBindStorageBuffers{ maxPerStageDescriptorUpdateAfterBindStorageBuffers_ }
      , maxPerStageDescriptorUpdateAfterBindSampledImages{ maxPerStageDescriptorUpdateAfterBindSampledImages_ }
      , maxPerStageDescriptorUpdateAfterBindStorageImages{ maxPerStageDescriptorUpdateAfterBindStorageImages_ }
      , maxPerStageDescriptorUpdateAfterBindInputAttachments{ maxPerStageDescriptorUpdateAfterBindInputAttachments_ }
      , maxPerStageUpdateAfterBindResources{ maxPerStageUpdateAfterBindResources_ }
      , maxDescriptorSetUpdateAfterBindSamplers{ maxDescriptorSetUpdateAfterBindSamplers_ }
      , maxDescriptorSetUpdateAfterBindUniformBuffers{ maxDescriptorSetUpdateAfterBindUniformBuffers_ }
      , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic{ maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ }
      , maxDescriptorSetUpdateAfterBindStorageBuffers{ maxDescriptorSetUpdateAfterBindStorageBuffers_ }
      , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic{ maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ }
      , maxDescriptorSetUpdateAfterBindSampledImages{ maxDescriptorSetUpdateAfterBindSampledImages_ }
      , maxDescriptorSetUpdateAfterBindStorageImages{ maxDescriptorSetUpdateAfterBindStorageImages_ }
      , maxDescriptorSetUpdateAfterBindInputAttachments{ maxDescriptorSetUpdateAfterBindInputAttachments_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDescriptorIndexingProperties( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDescriptorIndexingProperties( *reinterpret_cast<PhysicalDeviceDescriptorIndexingProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceDescriptorIndexingProperties & operator=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceDescriptorIndexingProperties & operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDescriptorIndexingProperties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceDescriptorIndexingProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingProperties *>( this );
    }

    operator VkPhysicalDeviceDescriptorIndexingProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingProperties *>( this );
    }

    operator VkPhysicalDeviceDescriptorIndexingProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingProperties *>( this );
    }

    operator VkPhysicalDeviceDescriptorIndexingProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDescriptorIndexingProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &,
               void * const &,
               uint32_t const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       maxUpdateAfterBindDescriptorsInAllPools,
                       shaderUniformBufferArrayNonUniformIndexingNative,
                       shaderSampledImageArrayNonUniformIndexingNative,
                       shaderStorageBufferArrayNonUniformIndexingNative,
                       shaderStorageImageArrayNonUniformIndexingNative,
                       shaderInputAttachmentArrayNonUniformIndexingNative,
                       robustBufferAccessUpdateAfterBind,
                       quadDivergentImplicitLod,
                       maxPerStageDescriptorUpdateAfterBindSamplers,
                       maxPerStageDescriptorUpdateAfterBindUniformBuffers,
                       maxPerStageDescriptorUpdateAfterBindStorageBuffers,
                       maxPerStageDescriptorUpdateAfterBindSampledImages,
                       maxPerStageDescriptorUpdateAfterBindStorageImages,
                       maxPerStageDescriptorUpdateAfterBindInputAttachments,
                       maxPerStageUpdateAfterBindResources,
                       maxDescriptorSetUpdateAfterBindSamplers,
                       maxDescriptorSetUpdateAfterBindUniformBuffers,
                       maxDescriptorSetUpdateAfterBindUniformBuffersDynamic,
                       maxDescriptorSetUpdateAfterBindStorageBuffers,
                       maxDescriptorSetUpdateAfterBindStorageBuffersDynamic,
                       maxDescriptorSetUpdateAfterBindSampledImages,
                       maxDescriptorSetUpdateAfterBindStorageImages,
                       maxDescriptorSetUpdateAfterBindInputAttachments );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDescriptorIndexingProperties const & ) const = default;
#else
    // member-wise equality (used when operator<=> is unavailable); pNext is compared as a raw pointer value, not deeply
    bool operator==( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      // compare via the reflection tuple instead of spelling out every member
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) &&
             ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative ) &&
             ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative ) &&
             ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative ) &&
             ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative ) &&
             ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) &&
             ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) &&
             ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) &&
             ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) &&
             ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) &&
             ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) &&
             ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) &&
             ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) &&
             ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) &&
             ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) &&
             ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) &&
             ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) &&
             ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) &&
             ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) &&
             ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) &&
             ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) &&
             ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments );
#  endif
    }

    // negation of the member-wise equality above
    bool operator!=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                                                = StructureType::ePhysicalDeviceDescriptorIndexingProperties;
    void *        pNext                                                = {};
    // overall limit across all descriptor pools
    uint32_t      maxUpdateAfterBindDescriptorsInAllPools              = {};
    // Bool32 flags: whether non-uniform indexing of each descriptor kind is natively supported
    Bool32        shaderUniformBufferArrayNonUniformIndexingNative     = {};
    Bool32        shaderSampledImageArrayNonUniformIndexingNative      = {};
    Bool32        shaderStorageBufferArrayNonUniformIndexingNative     = {};
    Bool32        shaderStorageImageArrayNonUniformIndexingNative      = {};
    Bool32        shaderInputAttachmentArrayNonUniformIndexingNative   = {};
    Bool32        robustBufferAccessUpdateAfterBind                    = {};
    Bool32        quadDivergentImplicitLod                             = {};
    // per-stage update-after-bind descriptor limits
    uint32_t      maxPerStageDescriptorUpdateAfterBindSamplers         = {};
    uint32_t      maxPerStageDescriptorUpdateAfterBindUniformBuffers   = {};
    uint32_t      maxPerStageDescriptorUpdateAfterBindStorageBuffers   = {};
    uint32_t      maxPerStageDescriptorUpdateAfterBindSampledImages    = {};
    uint32_t      maxPerStageDescriptorUpdateAfterBindStorageImages    = {};
    uint32_t      maxPerStageDescriptorUpdateAfterBindInputAttachments = {};
    uint32_t      maxPerStageUpdateAfterBindResources                  = {};
    // per-descriptor-set update-after-bind limits
    uint32_t      maxDescriptorSetUpdateAfterBindSamplers              = {};
    uint32_t      maxDescriptorSetUpdateAfterBindUniformBuffers        = {};
    uint32_t      maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {};
    uint32_t      maxDescriptorSetUpdateAfterBindStorageBuffers        = {};
    uint32_t      maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {};
    uint32_t      maxDescriptorSetUpdateAfterBindSampledImages         = {};
    uint32_t      maxDescriptorSetUpdateAfterBindStorageImages         = {};
    uint32_t      maxDescriptorSetUpdateAfterBindInputAttachments      = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C struct to this C++ wrapper type (C++20 and later only)
  template <>
  struct CppType<VkPhysicalDeviceDescriptorIndexingProperties>
  {
    using Type = PhysicalDeviceDescriptorIndexingProperties;
  };
#endif

  // maps the StructureType enumerant to this C++ wrapper type (used by structure-chain machinery)
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorIndexingProperties>
  {
    using Type = PhysicalDeviceDescriptorIndexingProperties;
  };

  // compatibility alias for code written against the extension-suffixed (EXT) name
  using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties;

  // wrapper struct for struct VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV.html
  struct PhysicalDeviceDescriptorPoolOverallocationFeaturesNV
  {
    using NativeType = VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV;

    // this struct may appear at most once in a pNext structure chain
    static const bool                                  allowDuplicate = false;
    // sType value identifying this struct in a structure chain
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; sType is fixed by its member initializer and is not a parameter
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorPoolOverallocationFeaturesNV( Bool32 descriptorPoolOverallocation_ = {},
                                                                               void * pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , descriptorPoolOverallocation{ descriptorPoolOverallocation_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceDescriptorPoolOverallocationFeaturesNV( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; the reinterpret_cast relies on wrapper and native type sharing the same layout
    PhysicalDeviceDescriptorPoolOverallocationFeaturesNV( VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDescriptorPoolOverallocationFeaturesNV( *reinterpret_cast<PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceDescriptorPoolOverallocationFeaturesNV &
      operator=( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (same layout assumption as the converting constructor)
    PhysicalDeviceDescriptorPoolOverallocationFeaturesNV & operator=( VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorPoolOverallocationFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorPoolOverallocationFeaturesNV &
      setDescriptorPoolOverallocation( Bool32 descriptorPoolOverallocation_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorPoolOverallocation = descriptorPoolOverallocation_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // zero-cost views of this object as the native C struct
    operator VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of const references to all members, used for generic comparison when reflection is enabled
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, descriptorPoolOverallocation );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & ) const = default;
#else
    // member-wise equality; pNext is compared as a raw pointer value, not deeply
    bool operator==( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorPoolOverallocation == rhs.descriptorPoolOverallocation );
#  endif
    }

    bool operator!=( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                        = StructureType::ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV;
    void *        pNext                        = {};
    Bool32        descriptorPoolOverallocation = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C struct to this C++ wrapper type (C++20 and later only)
  template <>
  struct CppType<VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV>
  {
    using Type = PhysicalDeviceDescriptorPoolOverallocationFeaturesNV;
  };
#endif

  // maps the StructureType enumerant to this C++ wrapper type (used by structure-chain machinery)
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV>
  {
    using Type = PhysicalDeviceDescriptorPoolOverallocationFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE.html
  struct PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE
  {
    using NativeType = VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;

    // this struct may appear at most once in a pNext structure chain
    static const bool                                  allowDuplicate = false;
    // sType value identifying this struct in a structure chain
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; sType is fixed by its member initializer and is not a parameter
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( Bool32 descriptorSetHostMapping_ = {},
                                                                              void * pNext_                    = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , descriptorSetHostMapping{ descriptorSetHostMapping_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; the reinterpret_cast relies on wrapper and native type sharing the same layout
    PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( *reinterpret_cast<PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const *>( &rhs ) )
    {
    }

    PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE &
      operator=( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (same layout assumption as the converting constructor)
    PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & operator=( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE &
      setDescriptorSetHostMapping( Bool32 descriptorSetHostMapping_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorSetHostMapping = descriptorSetHostMapping_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // zero-cost views of this object as the native C struct
    operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE *>( this );
    }

    operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE *>( this );
    }

    operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE *>( this );
    }

    operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of const references to all members, used for generic comparison when reflection is enabled
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, descriptorSetHostMapping );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & ) const = default;
#else
    // member-wise equality; pNext is compared as a raw pointer value, not deeply
    bool operator==( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetHostMapping == rhs.descriptorSetHostMapping );
#  endif
    }

    bool operator!=( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                    = StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
    void *        pNext                    = {};
    Bool32        descriptorSetHostMapping = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C struct to this C++ wrapper type (C++20 and later only)
  template <>
  struct CppType<VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>
  {
    using Type = PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
  };
#endif

  // maps the StructureType enumerant to this C++ wrapper type (used by structure-chain machinery)
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>
  {
    using Type = PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
  };

  // wrapper struct for struct VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.html
  struct PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV
  {
    using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV;

    // this struct may appear at most once in a pNext structure chain
    static const bool                                  allowDuplicate = false;
    // sType value identifying this struct in a structure chain
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; sType is fixed by its member initializer and is not a parameter
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( Bool32 deviceGeneratedCompute_              = {},
                                                                                 Bool32 deviceGeneratedComputePipelines_     = {},
                                                                                 Bool32 deviceGeneratedComputeCaptureReplay_ = {},
                                                                                 void * pNext_                               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , deviceGeneratedCompute{ deviceGeneratedCompute_ }
      , deviceGeneratedComputePipelines{ deviceGeneratedComputePipelines_ }
      , deviceGeneratedComputeCaptureReplay{ deviceGeneratedComputeCaptureReplay_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; the reinterpret_cast relies on wrapper and native type sharing the same layout
    PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV &
      operator=( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (same layout assumption as the converting constructor)
    PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV &
      operator=( VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV &
      setDeviceGeneratedCompute( Bool32 deviceGeneratedCompute_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceGeneratedCompute = deviceGeneratedCompute_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV &
      setDeviceGeneratedComputePipelines( Bool32 deviceGeneratedComputePipelines_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceGeneratedComputePipelines = deviceGeneratedComputePipelines_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV &
      setDeviceGeneratedComputeCaptureReplay( Bool32 deviceGeneratedComputeCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceGeneratedComputeCaptureReplay = deviceGeneratedComputeCaptureReplay_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // zero-cost views of this object as the native C struct
    operator VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of const references to all members, used for generic comparison when reflection is enabled
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, deviceGeneratedCompute, deviceGeneratedComputePipelines, deviceGeneratedComputeCaptureReplay );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & ) const = default;
#else
    // member-wise equality; pNext is compared as a raw pointer value, not deeply
    bool operator==( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceGeneratedCompute == rhs.deviceGeneratedCompute ) &&
             ( deviceGeneratedComputePipelines == rhs.deviceGeneratedComputePipelines ) &&
             ( deviceGeneratedComputeCaptureReplay == rhs.deviceGeneratedComputeCaptureReplay );
#  endif
    }

    bool operator!=( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                               = StructureType::ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV;
    void *        pNext                               = {};
    Bool32        deviceGeneratedCompute              = {};
    Bool32        deviceGeneratedComputePipelines     = {};
    Bool32        deviceGeneratedComputeCaptureReplay = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C struct to this C++ wrapper type (C++20 and later only)
  template <>
  struct CppType<VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV>
  {
    using Type = PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV;
  };
#endif

  // maps the StructureType enumerant to this C++ wrapper type (used by structure-chain machinery)
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV>
  {
    using Type = PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT.html
  struct PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT;

    // this struct may appear at most once in a pNext structure chain
    static const bool                                  allowDuplicate = false;
    // sType value identifying this struct in a structure chain
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; sType is fixed by its member initializer and is not a parameter
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT( Bool32 deviceGeneratedCommands_        = {},
                                                                           Bool32 dynamicGeneratedPipelineLayout_ = {},
                                                                           void * pNext_                          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , deviceGeneratedCommands{ deviceGeneratedCommands_ }
      , dynamicGeneratedPipelineLayout{ dynamicGeneratedPipelineLayout_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; the reinterpret_cast relies on wrapper and native type sharing the same layout
    PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT( *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (same layout assumption as the converting constructor)
    PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & setDeviceGeneratedCommands( Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceGeneratedCommands = deviceGeneratedCommands_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT &
      setDynamicGeneratedPipelineLayout( Bool32 dynamicGeneratedPipelineLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      dynamicGeneratedPipelineLayout = dynamicGeneratedPipelineLayout_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // zero-cost views of this object as the native C struct
    operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of const references to all members, used for generic comparison when reflection is enabled
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, deviceGeneratedCommands, dynamicGeneratedPipelineLayout );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & ) const = default;
#else
    // member-wise equality; pNext is compared as a raw pointer value, not deeply
    bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceGeneratedCommands == rhs.deviceGeneratedCommands ) &&
             ( dynamicGeneratedPipelineLayout == rhs.dynamicGeneratedPipelineLayout );
#  endif
    }

    bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                          = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesEXT;
    void *        pNext                          = {};
    Bool32        deviceGeneratedCommands        = {};
    Bool32        dynamicGeneratedPipelineLayout = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C struct to this C++ wrapper type (C++20 and later only)
  template <>
  struct CppType<VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT>
  {
    using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT;
  };
#endif

  // maps the StructureType enumerant to this C++ wrapper type (used by structure-chain machinery)
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesEXT>
  {
    using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV.html
  struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV
  {
    using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV;

    // this struct may appear at most once in a pNext structure chain
    static const bool                                  allowDuplicate = false;
    // sType value identifying this struct in a structure chain
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; sType is fixed by its member initializer and is not a parameter
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( Bool32 deviceGeneratedCommands_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , deviceGeneratedCommands{ deviceGeneratedCommands_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; the reinterpret_cast relies on wrapper and native type sharing the same layout
    PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (same layout assumption as the converting constructor)
    PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setDeviceGeneratedCommands( Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceGeneratedCommands = deviceGeneratedCommands_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // zero-cost views of this object as the native C struct
    operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of const references to all members, used for generic comparison when reflection is enabled
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, deviceGeneratedCommands );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & ) const = default;
#else
    // member-wise equality; pNext is compared as a raw pointer value, not deeply
    bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceGeneratedCommands == rhs.deviceGeneratedCommands );
#  endif
    }

    bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                   = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
    void *        pNext                   = {};
    Bool32        deviceGeneratedCommands = {};
  };

// Map the native C struct (C++20 and up) and its StructureType enum value back to the C++ wrapper type.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV>
  {
    using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV>
  {
    using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT.html
  // Properties (output) structure: no setter methods are generated for it, since its fields are
  // filled in by the implementation when the struct is chained into a properties query.
  struct PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT
  {
    // The layout-compatible C struct this wrapper mirrors.
    using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT;

    // allowDuplicate == false: at most one instance of this struct may appear in a StructureChain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; every member is value-initialized by default, pNext defaults to nullptr.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT( uint32_t                          maxIndirectPipelineCount_              = {},
                                                                             uint32_t                          maxIndirectShaderObjectCount_          = {},
                                                                             uint32_t                          maxIndirectSequenceCount_              = {},
                                                                             uint32_t                          maxIndirectCommandsTokenCount_         = {},
                                                                             uint32_t                          maxIndirectCommandsTokenOffset_        = {},
                                                                             uint32_t                          maxIndirectCommandsIndirectStride_     = {},
                                                                             IndirectCommandsInputModeFlagsEXT supportedIndirectCommandsInputModes_   = {},
                                                                             ShaderStageFlags                  supportedIndirectCommandsShaderStages_ = {},
                                                                             ShaderStageFlags supportedIndirectCommandsShaderStagesPipelineBinding_   = {},
                                                                             ShaderStageFlags supportedIndirectCommandsShaderStagesShaderBinding_     = {},
                                                                             Bool32           deviceGeneratedCommandsTransformFeedback_               = {},
                                                                             Bool32           deviceGeneratedCommandsMultiDrawIndirectCount_          = {},
                                                                             void *           pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxIndirectPipelineCount{ maxIndirectPipelineCount_ }
      , maxIndirectShaderObjectCount{ maxIndirectShaderObjectCount_ }
      , maxIndirectSequenceCount{ maxIndirectSequenceCount_ }
      , maxIndirectCommandsTokenCount{ maxIndirectCommandsTokenCount_ }
      , maxIndirectCommandsTokenOffset{ maxIndirectCommandsTokenOffset_ }
      , maxIndirectCommandsIndirectStride{ maxIndirectCommandsIndirectStride_ }
      , supportedIndirectCommandsInputModes{ supportedIndirectCommandsInputModes_ }
      , supportedIndirectCommandsShaderStages{ supportedIndirectCommandsShaderStages_ }
      , supportedIndirectCommandsShaderStagesPipelineBinding{ supportedIndirectCommandsShaderStagesPipelineBinding_ }
      , supportedIndirectCommandsShaderStagesShaderBinding{ supportedIndirectCommandsShaderStagesShaderBinding_ }
      , deviceGeneratedCommandsTransformFeedback{ deviceGeneratedCommandsTransformFeedback_ }
      , deviceGeneratedCommandsMultiDrawIndirectCount{ deviceGeneratedCommandsMultiDrawIndirectCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-compatible with NativeType.
    PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT( *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT &
      operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility assumption as the converting constructor).
    PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT & operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C struct, by reference and by pointer, for passing to the C API.
    operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used by the equality operator below.
    std::tuple<StructureType const &,
               void * const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               IndirectCommandsInputModeFlagsEXT const &,
               ShaderStageFlags const &,
               ShaderStageFlags const &,
               ShaderStageFlags const &,
               Bool32 const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       maxIndirectPipelineCount,
                       maxIndirectShaderObjectCount,
                       maxIndirectSequenceCount,
                       maxIndirectCommandsTokenCount,
                       maxIndirectCommandsTokenOffset,
                       maxIndirectCommandsIndirectStride,
                       supportedIndirectCommandsInputModes,
                       supportedIndirectCommandsShaderStages,
                       supportedIndirectCommandsShaderStagesPipelineBinding,
                       supportedIndirectCommandsShaderStagesShaderBinding,
                       deviceGeneratedCommandsTransformFeedback,
                       deviceGeneratedCommandsMultiDrawIndirectCount );
    }
#endif

    // Member-wise comparison: defaulted <=> when available, otherwise explicit member-by-member
    // equality. Note that pNext is compared as a raw pointer value, not deeply.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxIndirectPipelineCount == rhs.maxIndirectPipelineCount ) &&
             ( maxIndirectShaderObjectCount == rhs.maxIndirectShaderObjectCount ) && ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount ) &&
             ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount ) &&
             ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset ) &&
             ( maxIndirectCommandsIndirectStride == rhs.maxIndirectCommandsIndirectStride ) &&
             ( supportedIndirectCommandsInputModes == rhs.supportedIndirectCommandsInputModes ) &&
             ( supportedIndirectCommandsShaderStages == rhs.supportedIndirectCommandsShaderStages ) &&
             ( supportedIndirectCommandsShaderStagesPipelineBinding == rhs.supportedIndirectCommandsShaderStagesPipelineBinding ) &&
             ( supportedIndirectCommandsShaderStagesShaderBinding == rhs.supportedIndirectCommandsShaderStagesShaderBinding ) &&
             ( deviceGeneratedCommandsTransformFeedback == rhs.deviceGeneratedCommandsTransformFeedback ) &&
             ( deviceGeneratedCommandsMultiDrawIndirectCount == rhs.deviceGeneratedCommandsMultiDrawIndirectCount );
#  endif
    }

    bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    // Members mirror VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT one-to-one, in order.
  public:
    StructureType                     sType                                                = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesEXT;
    void *                            pNext                                                = {};
    uint32_t                          maxIndirectPipelineCount                             = {};
    uint32_t                          maxIndirectShaderObjectCount                         = {};
    uint32_t                          maxIndirectSequenceCount                             = {};
    uint32_t                          maxIndirectCommandsTokenCount                        = {};
    uint32_t                          maxIndirectCommandsTokenOffset                       = {};
    uint32_t                          maxIndirectCommandsIndirectStride                    = {};
    IndirectCommandsInputModeFlagsEXT supportedIndirectCommandsInputModes                  = {};
    ShaderStageFlags                  supportedIndirectCommandsShaderStages                = {};
    ShaderStageFlags                  supportedIndirectCommandsShaderStagesPipelineBinding = {};
    ShaderStageFlags                  supportedIndirectCommandsShaderStagesShaderBinding   = {};
    Bool32                            deviceGeneratedCommandsTransformFeedback             = {};
    Bool32                            deviceGeneratedCommandsMultiDrawIndirectCount        = {};
  };

// Map the native C struct (C++20 and up) and its StructureType enum value back to the C++ wrapper type.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT>
  {
    using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesEXT>
  {
    using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV.html
  // Properties (output) structure: no setter methods are generated for it, since its fields are
  // filled in by the implementation when the struct is chained into a properties query.
  struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV
  {
    // The layout-compatible C struct this wrapper mirrors.
    using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV;

    // allowDuplicate == false: at most one instance of this struct may appear in a StructureChain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; every member is value-initialized by default, pNext defaults to nullptr.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( uint32_t maxGraphicsShaderGroupCount_              = {},
                                                                            uint32_t maxIndirectSequenceCount_                 = {},
                                                                            uint32_t maxIndirectCommandsTokenCount_            = {},
                                                                            uint32_t maxIndirectCommandsStreamCount_           = {},
                                                                            uint32_t maxIndirectCommandsTokenOffset_           = {},
                                                                            uint32_t maxIndirectCommandsStreamStride_          = {},
                                                                            uint32_t minSequencesCountBufferOffsetAlignment_   = {},
                                                                            uint32_t minSequencesIndexBufferOffsetAlignment_   = {},
                                                                            uint32_t minIndirectCommandsBufferOffsetAlignment_ = {},
                                                                            void *   pNext_                                    = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxGraphicsShaderGroupCount{ maxGraphicsShaderGroupCount_ }
      , maxIndirectSequenceCount{ maxIndirectSequenceCount_ }
      , maxIndirectCommandsTokenCount{ maxIndirectCommandsTokenCount_ }
      , maxIndirectCommandsStreamCount{ maxIndirectCommandsStreamCount_ }
      , maxIndirectCommandsTokenOffset{ maxIndirectCommandsTokenOffset_ }
      , maxIndirectCommandsStreamStride{ maxIndirectCommandsStreamStride_ }
      , minSequencesCountBufferOffsetAlignment{ minSequencesCountBufferOffsetAlignment_ }
      , minSequencesIndexBufferOffsetAlignment{ minSequencesIndexBufferOffsetAlignment_ }
      , minIndirectCommandsBufferOffsetAlignment{ minIndirectCommandsBufferOffsetAlignment_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-compatible with NativeType.
    PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceDeviceGeneratedCommandsPropertiesNV &
      operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility assumption as the converting constructor).
    PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C struct, by reference and by pointer, for passing to the C API.
    operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used by the equality operator below.
    std::tuple<StructureType const &,
               void * const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       maxGraphicsShaderGroupCount,
                       maxIndirectSequenceCount,
                       maxIndirectCommandsTokenCount,
                       maxIndirectCommandsStreamCount,
                       maxIndirectCommandsTokenOffset,
                       maxIndirectCommandsStreamStride,
                       minSequencesCountBufferOffsetAlignment,
                       minSequencesIndexBufferOffsetAlignment,
                       minIndirectCommandsBufferOffsetAlignment );
    }
#endif

    // Member-wise comparison: defaulted <=> when available, otherwise explicit member-by-member
    // equality. Note that pNext is compared as a raw pointer value, not deeply.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxGraphicsShaderGroupCount == rhs.maxGraphicsShaderGroupCount ) &&
             ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount ) && ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount ) &&
             ( maxIndirectCommandsStreamCount == rhs.maxIndirectCommandsStreamCount ) &&
             ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset ) &&
             ( maxIndirectCommandsStreamStride == rhs.maxIndirectCommandsStreamStride ) &&
             ( minSequencesCountBufferOffsetAlignment == rhs.minSequencesCountBufferOffsetAlignment ) &&
             ( minSequencesIndexBufferOffsetAlignment == rhs.minSequencesIndexBufferOffsetAlignment ) &&
             ( minIndirectCommandsBufferOffsetAlignment == rhs.minIndirectCommandsBufferOffsetAlignment );
#  endif
    }

    bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    // Members mirror VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV one-to-one, in order.
  public:
    StructureType sType                                    = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
    void *        pNext                                    = {};
    uint32_t      maxGraphicsShaderGroupCount              = {};
    uint32_t      maxIndirectSequenceCount                 = {};
    uint32_t      maxIndirectCommandsTokenCount            = {};
    uint32_t      maxIndirectCommandsStreamCount           = {};
    uint32_t      maxIndirectCommandsTokenOffset           = {};
    uint32_t      maxIndirectCommandsStreamStride          = {};
    uint32_t      minSequencesCountBufferOffsetAlignment   = {};
    uint32_t      minSequencesIndexBufferOffsetAlignment   = {};
    uint32_t      minIndirectCommandsBufferOffsetAlignment = {};
  };

// Map the native C struct (C++20 and up) and its StructureType enum value back to the C++ wrapper type.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV>
  {
    using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV>
  {
    using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceDeviceMemoryReportFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDeviceMemoryReportFeaturesEXT.html
  struct PhysicalDeviceDeviceMemoryReportFeaturesEXT
  {
    // The layout-compatible C struct this wrapper mirrors.
    using NativeType = VkPhysicalDeviceDeviceMemoryReportFeaturesEXT;

    // allowDuplicate == false: at most one instance of this struct may appear in a StructureChain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; the feature flag is value-initialized by default, pNext defaults to nullptr.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( Bool32 deviceMemoryReport_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , deviceMemoryReport{ deviceMemoryReport_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-compatible with NativeType.
    PhysicalDeviceDeviceMemoryReportFeaturesEXT( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDeviceMemoryReportFeaturesEXT( *reinterpret_cast<PhysicalDeviceDeviceMemoryReportFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceDeviceMemoryReportFeaturesEXT & operator=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility assumption as the converting constructor).
    PhysicalDeviceDeviceMemoryReportFeaturesEXT & operator=( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDeviceMemoryReportFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, returning *this to allow fluent-style initialization.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT & setDeviceMemoryReport( Bool32 deviceMemoryReport_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceMemoryReport = deviceMemoryReport_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer, for passing to the C API.
    operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDeviceMemoryReportFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDeviceMemoryReportFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDeviceMemoryReportFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDeviceMemoryReportFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used by the equality operator below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, deviceMemoryReport );
    }
#endif

    // Member-wise comparison: defaulted <=> when available, otherwise explicit member-by-member
    // equality. Note that pNext is compared as a raw pointer value, not deeply.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMemoryReport == rhs.deviceMemoryReport );
#  endif
    }

    bool operator!=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    // Members mirror VkPhysicalDeviceDeviceMemoryReportFeaturesEXT one-to-one, in order.
  public:
    StructureType sType              = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT;
    void *        pNext              = {};
    Bool32        deviceMemoryReport = {};
  };

// Map the native C struct (C++20 and up) and its StructureType enum value back to the C++ wrapper type.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceDeviceMemoryReportFeaturesEXT>
  {
    using Type = PhysicalDeviceDeviceMemoryReportFeaturesEXT;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT>
  {
    using Type = PhysicalDeviceDeviceMemoryReportFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceDiagnosticsConfigFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDiagnosticsConfigFeaturesNV.html
  struct PhysicalDeviceDiagnosticsConfigFeaturesNV
  {
    // The layout-compatible C struct this wrapper mirrors.
    using NativeType = VkPhysicalDeviceDiagnosticsConfigFeaturesNV;

    // allowDuplicate == false: at most one instance of this struct may appear in a StructureChain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; the feature flag is value-initialized by default, pNext defaults to nullptr.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( Bool32 diagnosticsConfig_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , diagnosticsConfig{ diagnosticsConfig_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-compatible with NativeType.
    PhysicalDeviceDiagnosticsConfigFeaturesNV( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDiagnosticsConfigFeaturesNV( *reinterpret_cast<PhysicalDeviceDiagnosticsConfigFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility assumption as the converting constructor).
    PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDiagnosticsConfigFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, returning *this to allow fluent-style initialization.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV & setDiagnosticsConfig( Bool32 diagnosticsConfig_ ) VULKAN_HPP_NOEXCEPT
    {
      diagnosticsConfig = diagnosticsConfig_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer, for passing to the C API.
    operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDiagnosticsConfigFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDiagnosticsConfigFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDiagnosticsConfigFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDiagnosticsConfigFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used by the equality operator below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, diagnosticsConfig );
    }
#endif

    // Member-wise comparison: defaulted <=> when available, otherwise explicit member-by-member
    // equality. Note that pNext is compared as a raw pointer value, not deeply.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDiagnosticsConfigFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( diagnosticsConfig == rhs.diagnosticsConfig );
#  endif
    }

    bool operator!=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    // Members mirror VkPhysicalDeviceDiagnosticsConfigFeaturesNV one-to-one, in order.
  public:
    StructureType sType             = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV;
    void *        pNext             = {};
    Bool32        diagnosticsConfig = {};
  };

// Map the native C struct (C++20 and up) and its StructureType enum value back to the C++ wrapper type.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceDiagnosticsConfigFeaturesNV>
  {
    using Type = PhysicalDeviceDiagnosticsConfigFeaturesNV;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV>
  {
    using Type = PhysicalDeviceDiagnosticsConfigFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceDiscardRectanglePropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDiscardRectanglePropertiesEXT.html
  // Properties (output) structure: no setter methods are generated for it, since its fields are
  // filled in by the implementation when the struct is chained into a properties query.
  struct PhysicalDeviceDiscardRectanglePropertiesEXT
  {
    // The layout-compatible C struct this wrapper mirrors.
    using NativeType = VkPhysicalDeviceDiscardRectanglePropertiesEXT;

    // allowDuplicate == false: at most one instance of this struct may appear in a StructureChain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; the limit is value-initialized by default, pNext defaults to nullptr.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxDiscardRectangles{ maxDiscardRectangles_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-compatible with NativeType.
    PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDiscardRectanglePropertiesEXT( *reinterpret_cast<PhysicalDeviceDiscardRectanglePropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility assumption as the converting constructor).
    PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDiscardRectanglePropertiesEXT const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C struct, by reference and by pointer, for passing to the C API.
    operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDiscardRectanglePropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDiscardRectanglePropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDiscardRectanglePropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceDiscardRectanglePropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDiscardRectanglePropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used by the equality operator below.
    std::tuple<StructureType const &, void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxDiscardRectangles );
    }
#endif

    // Member-wise comparison: defaulted <=> when available, otherwise explicit member-by-member
    // equality. Note that pNext is compared as a raw pointer value, not deeply.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDiscardRectanglePropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDiscardRectangles == rhs.maxDiscardRectangles );
#  endif
    }

    bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    // Members mirror VkPhysicalDeviceDiscardRectanglePropertiesEXT one-to-one, in order.
  public:
    StructureType sType                = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
    void *        pNext                = {};
    uint32_t      maxDiscardRectangles = {};
  };

// Map the native C struct (C++20 and up) and its StructureType enum value back to the C++ wrapper type.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceDiscardRectanglePropertiesEXT>
  {
    using Type = PhysicalDeviceDiscardRectanglePropertiesEXT;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT>
  {
    using Type = PhysicalDeviceDiscardRectanglePropertiesEXT;
  };

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  // wrapper struct for struct VkPhysicalDeviceDisplacementMicromapFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDisplacementMicromapFeaturesNV.html
  struct PhysicalDeviceDisplacementMicromapFeaturesNV
  {
    using NativeType = VkPhysicalDeviceDisplacementMicromapFeaturesNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDisplacementMicromapFeaturesNV;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sets the feature flag and an optional pNext chain pointer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDisplacementMicromapFeaturesNV( Bool32 displacementMicromap_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , displacementMicromap( displacementMicromap_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDisplacementMicromapFeaturesNV( PhysicalDeviceDisplacementMicromapFeaturesNV const & other ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; the wrapper is layout-compatible with it.
    PhysicalDeviceDisplacementMicromapFeaturesNV( VkPhysicalDeviceDisplacementMicromapFeaturesNV const & other ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDisplacementMicromapFeaturesNV( *reinterpret_cast<PhysicalDeviceDisplacementMicromapFeaturesNV const *>( &other ) )
    {
    }

    PhysicalDeviceDisplacementMicromapFeaturesNV & operator=( PhysicalDeviceDisplacementMicromapFeaturesNV const & other ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct.
    PhysicalDeviceDisplacementMicromapFeaturesNV & operator=( VkPhysicalDeviceDisplacementMicromapFeaturesNV const & other ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDisplacementMicromapFeaturesNV const *>( &other );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDisplacementMicromapFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDisplacementMicromapFeaturesNV & setDisplacementMicromap( Bool32 displacementMicromap_ ) VULKAN_HPP_NOEXCEPT
    {
      this->displacementMicromap = displacementMicromap_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer.
    operator VkPhysicalDeviceDisplacementMicromapFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDisplacementMicromapFeaturesNV const *>( this );
    }

    operator VkPhysicalDeviceDisplacementMicromapFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDisplacementMicromapFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceDisplacementMicromapFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDisplacementMicromapFeaturesNV const *>( this );
    }

    operator VkPhysicalDeviceDisplacementMicromapFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDisplacementMicromapFeaturesNV *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used by the reflection-based comparisons.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, displacementMicromap );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDisplacementMicromapFeaturesNV const & ) const = default;
#  else
    bool operator==( PhysicalDeviceDisplacementMicromapFeaturesNV const & other ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == other.reflect();
#    else
      return ( sType == other.sType ) && ( pNext == other.pNext ) && ( displacementMicromap == other.displacementMicromap );
#    endif
    }

    bool operator!=( PhysicalDeviceDisplacementMicromapFeaturesNV const & other ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == other );
    }
#  endif

  public:
    StructureType sType                = StructureType::ePhysicalDeviceDisplacementMicromapFeaturesNV;
    void *        pNext                = {};
    Bool32        displacementMicromap = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization mapping the C API struct to its C++ wrapper type (available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceDisplacementMicromapFeaturesNV>
  {
    using Type = PhysicalDeviceDisplacementMicromapFeaturesNV;
  };
#  endif

  // Trait specialization mapping the sType enumerator to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDisplacementMicromapFeaturesNV>
  {
    using Type = PhysicalDeviceDisplacementMicromapFeaturesNV;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  // wrapper struct for struct VkPhysicalDeviceDisplacementMicromapPropertiesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDisplacementMicromapPropertiesNV.html
  struct PhysicalDeviceDisplacementMicromapPropertiesNV
  {
    using NativeType = VkPhysicalDeviceDisplacementMicromapPropertiesNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDisplacementMicromapPropertiesNV;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sets the property value and an optional pNext chain pointer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDisplacementMicromapPropertiesNV( uint32_t maxDisplacementMicromapSubdivisionLevel_ = {},
                                                                         void *   pNext_                                   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , maxDisplacementMicromapSubdivisionLevel( maxDisplacementMicromapSubdivisionLevel_ )
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceDisplacementMicromapPropertiesNV( PhysicalDeviceDisplacementMicromapPropertiesNV const & other ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; the wrapper is layout-compatible with it.
    PhysicalDeviceDisplacementMicromapPropertiesNV( VkPhysicalDeviceDisplacementMicromapPropertiesNV const & other ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDisplacementMicromapPropertiesNV( *reinterpret_cast<PhysicalDeviceDisplacementMicromapPropertiesNV const *>( &other ) )
    {
    }

    PhysicalDeviceDisplacementMicromapPropertiesNV & operator=( PhysicalDeviceDisplacementMicromapPropertiesNV const & other ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct.
    PhysicalDeviceDisplacementMicromapPropertiesNV & operator=( VkPhysicalDeviceDisplacementMicromapPropertiesNV const & other ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDisplacementMicromapPropertiesNV const *>( &other );
      return *this;
    }

    // Implicit conversions to the C struct, by reference and by pointer.
    operator VkPhysicalDeviceDisplacementMicromapPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDisplacementMicromapPropertiesNV const *>( this );
    }

    operator VkPhysicalDeviceDisplacementMicromapPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDisplacementMicromapPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceDisplacementMicromapPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDisplacementMicromapPropertiesNV const *>( this );
    }

    operator VkPhysicalDeviceDisplacementMicromapPropertiesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDisplacementMicromapPropertiesNV *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used by the reflection-based comparisons.
    std::tuple<StructureType const &, void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxDisplacementMicromapSubdivisionLevel );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDisplacementMicromapPropertiesNV const & ) const = default;
#  else
    bool operator==( PhysicalDeviceDisplacementMicromapPropertiesNV const & other ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == other.reflect();
#    else
      return ( sType == other.sType ) && ( pNext == other.pNext ) &&
             ( maxDisplacementMicromapSubdivisionLevel == other.maxDisplacementMicromapSubdivisionLevel );
#    endif
    }

    bool operator!=( PhysicalDeviceDisplacementMicromapPropertiesNV const & other ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == other );
    }
#  endif

  public:
    StructureType sType                                   = StructureType::ePhysicalDeviceDisplacementMicromapPropertiesNV;
    void *        pNext                                   = {};
    uint32_t      maxDisplacementMicromapSubdivisionLevel = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization mapping the C API struct to its C++ wrapper type (available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceDisplacementMicromapPropertiesNV>
  {
    using Type = PhysicalDeviceDisplacementMicromapPropertiesNV;
  };
#  endif

  // Trait specialization mapping the sType enumerator to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDisplacementMicromapPropertiesNV>
  {
    using Type = PhysicalDeviceDisplacementMicromapPropertiesNV;
  };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  // wrapper struct for struct VkPhysicalDeviceDriverProperties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDriverProperties.html
  // Unlike most property wrappers, comparison is hand-written here because driverName / driverInfo
  // are fixed-size char arrays holding NUL-terminated strings and are compared with strcmp.
  struct PhysicalDeviceDriverProperties
  {
    using NativeType = VkPhysicalDeviceDriverProperties;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDriverProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; driverName / driverInfo are copied from fixed-size std::arrays of char.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( DriverId                                          driverID_           = DriverId::eAmdProprietary,
                                                            std::array<char, VK_MAX_DRIVER_NAME_SIZE> const & driverName_         = {},
                                                            std::array<char, VK_MAX_DRIVER_INFO_SIZE> const & driverInfo_         = {},
                                                            ConformanceVersion                                conformanceVersion_ = {},
                                                            void *                                            pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , driverID{ driverID_ }
      , driverName{ driverName_ }
      , driverInfo{ driverInfo_ }
      , conformanceVersion{ conformanceVersion_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct (layout-compatible).
    PhysicalDeviceDriverProperties( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDriverProperties( *reinterpret_cast<PhysicalDeviceDriverProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceDriverProperties & operator=( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct.
    PhysicalDeviceDriverProperties & operator=( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDriverProperties const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C struct, by reference and by pointer.
    operator VkPhysicalDeviceDriverProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDriverProperties *>( this );
    }

    operator VkPhysicalDeviceDriverProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDriverProperties *>( this );
    }

    operator VkPhysicalDeviceDriverProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDriverProperties *>( this );
    }

    operator VkPhysicalDeviceDriverProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDriverProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members for reflection-based use.
    std::tuple<StructureType const &,
               void * const &,
               DriverId const &,
               ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> const &,
               ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> const &,
               ConformanceVersion const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, driverID, driverName, driverInfo, conformanceVersion );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written three-way comparison (not defaulted): members are compared in declaration
    // order, but the string members use strcmp so ordering follows the C-string contents
    // rather than the raw array bytes past the terminating NUL.
    std::strong_ordering operator<=>( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = driverID <=> rhs.driverID; cmp != 0 )
        return cmp;
      // strcmp returns an int, so its sign is mapped onto std::strong_ordering explicitly.
      if ( auto cmp = strcmp( driverName, rhs.driverName ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = strcmp( driverInfo, rhs.driverInfo ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = conformanceVersion <=> rhs.conformanceVersion; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    // Equality also compares the string members with strcmp (C-string equality, not byte-array equality).
    bool operator==( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( strcmp( driverName, rhs.driverName ) == 0 ) &&
             ( strcmp( driverInfo, rhs.driverInfo ) == 0 ) && ( conformanceVersion == rhs.conformanceVersion );
    }

    bool operator!=( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    StructureType                                 sType              = StructureType::ePhysicalDeviceDriverProperties;
    void *                                        pNext              = {};
    DriverId                                      driverID           = DriverId::eAmdProprietary;
    ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName         = {};
    ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo         = {};
    ConformanceVersion                            conformanceVersion = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization mapping the C API struct to its C++ wrapper type (available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceDriverProperties>
  {
    using Type = PhysicalDeviceDriverProperties;
  };
#endif

  // Trait specialization mapping the sType enumerator to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDriverProperties>
  {
    using Type = PhysicalDeviceDriverProperties;
  };

  // Alias carrying the KHR suffix for the same struct.
  using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties;

  // wrapper struct for struct VkPhysicalDeviceDrmPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDrmPropertiesEXT.html
  struct PhysicalDeviceDrmPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceDrmPropertiesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDrmPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDrmPropertiesEXT( Bool32  hasPrimary_   = {},
                                                         Bool32  hasRender_    = {},
                                                         int64_t primaryMajor_ = {},
                                                         int64_t primaryMinor_ = {},
                                                         int64_t renderMajor_  = {},
                                                         int64_t renderMinor_  = {},
                                                         void *  pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , hasPrimary{ hasPrimary_ }
      , hasRender{ hasRender_ }
      , primaryMajor{ primaryMajor_ }
      , primaryMinor{ primaryMinor_ }
      , renderMajor{ renderMajor_ }
      , renderMinor{ renderMinor_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDrmPropertiesEXT( PhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDrmPropertiesEXT( VkPhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDrmPropertiesEXT( *reinterpret_cast<PhysicalDeviceDrmPropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceDrmPropertiesEXT & operator=( PhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceDrmPropertiesEXT & operator=( VkPhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDrmPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceDrmPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDrmPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceDrmPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDrmPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceDrmPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDrmPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceDrmPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDrmPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, int64_t const &, int64_t const &, int64_t const &, int64_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, hasPrimary, hasRender, primaryMajor, primaryMinor, renderMajor, renderMinor );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDrmPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceDrmPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hasPrimary == rhs.hasPrimary ) && ( hasRender == rhs.hasRender ) &&
             ( primaryMajor == rhs.primaryMajor ) && ( primaryMinor == rhs.primaryMinor ) && ( renderMajor == rhs.renderMajor ) &&
             ( renderMinor == rhs.renderMinor );
#  endif
    }

    bool operator!=( PhysicalDeviceDrmPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType        = StructureType::ePhysicalDeviceDrmPropertiesEXT;
    void *        pNext        = {};
    Bool32        hasPrimary   = {};
    Bool32        hasRender    = {};
    int64_t       primaryMajor = {};
    int64_t       primaryMinor = {};
    int64_t       renderMajor  = {};
    int64_t       renderMinor  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization mapping the C API struct to its C++ wrapper type (available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceDrmPropertiesEXT>
  {
    using Type = PhysicalDeviceDrmPropertiesEXT;
  };
#endif

  // Trait specialization mapping the sType enumerator to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDrmPropertiesEXT>
  {
    using Type = PhysicalDeviceDrmPropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceDynamicRenderingFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDynamicRenderingFeatures.html
  struct PhysicalDeviceDynamicRenderingFeatures
  {
    using NativeType = VkPhysicalDeviceDynamicRenderingFeatures;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDynamicRenderingFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sets the feature flag and an optional pNext chain pointer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures( Bool32 dynamicRendering_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , dynamicRendering( dynamicRendering_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures( PhysicalDeviceDynamicRenderingFeatures const & other ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; the wrapper is layout-compatible with it.
    PhysicalDeviceDynamicRenderingFeatures( VkPhysicalDeviceDynamicRenderingFeatures const & other ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDynamicRenderingFeatures( *reinterpret_cast<PhysicalDeviceDynamicRenderingFeatures const *>( &other ) )
    {
    }

    PhysicalDeviceDynamicRenderingFeatures & operator=( PhysicalDeviceDynamicRenderingFeatures const & other ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct.
    PhysicalDeviceDynamicRenderingFeatures & operator=( VkPhysicalDeviceDynamicRenderingFeatures const & other ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDynamicRenderingFeatures const *>( &other );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures & setDynamicRendering( Bool32 dynamicRendering_ ) VULKAN_HPP_NOEXCEPT
    {
      this->dynamicRendering = dynamicRendering_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer.
    operator VkPhysicalDeviceDynamicRenderingFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDynamicRenderingFeatures const *>( this );
    }

    operator VkPhysicalDeviceDynamicRenderingFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDynamicRenderingFeatures *>( this );
    }

    operator VkPhysicalDeviceDynamicRenderingFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDynamicRenderingFeatures const *>( this );
    }

    operator VkPhysicalDeviceDynamicRenderingFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDynamicRenderingFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used by the reflection-based comparisons.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, dynamicRendering );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDynamicRenderingFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceDynamicRenderingFeatures const & other ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == other.reflect();
#  else
      return ( sType == other.sType ) && ( pNext == other.pNext ) && ( dynamicRendering == other.dynamicRendering );
#  endif
    }

    bool operator!=( PhysicalDeviceDynamicRenderingFeatures const & other ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == other );
    }
#endif

  public:
    StructureType sType            = StructureType::ePhysicalDeviceDynamicRenderingFeatures;
    void *        pNext            = {};
    Bool32        dynamicRendering = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization mapping the C API struct to its C++ wrapper type (available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceDynamicRenderingFeatures>
  {
    using Type = PhysicalDeviceDynamicRenderingFeatures;
  };
#endif

  // Trait specialization mapping the sType enumerator to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDynamicRenderingFeatures>
  {
    using Type = PhysicalDeviceDynamicRenderingFeatures;
  };

  // Alias carrying the KHR suffix for the same struct.
  using PhysicalDeviceDynamicRenderingFeaturesKHR = PhysicalDeviceDynamicRenderingFeatures;

  // wrapper struct for struct VkPhysicalDeviceDynamicRenderingLocalReadFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDynamicRenderingLocalReadFeatures.html
  struct PhysicalDeviceDynamicRenderingLocalReadFeatures
  {
    using NativeType = VkPhysicalDeviceDynamicRenderingLocalReadFeatures;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDynamicRenderingLocalReadFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sets the feature flag and an optional pNext chain pointer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingLocalReadFeatures( Bool32 dynamicRenderingLocalRead_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , dynamicRenderingLocalRead( dynamicRenderingLocalRead_ )
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceDynamicRenderingLocalReadFeatures( PhysicalDeviceDynamicRenderingLocalReadFeatures const & other ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; the wrapper is layout-compatible with it.
    PhysicalDeviceDynamicRenderingLocalReadFeatures( VkPhysicalDeviceDynamicRenderingLocalReadFeatures const & other ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDynamicRenderingLocalReadFeatures( *reinterpret_cast<PhysicalDeviceDynamicRenderingLocalReadFeatures const *>( &other ) )
    {
    }

    PhysicalDeviceDynamicRenderingLocalReadFeatures & operator=( PhysicalDeviceDynamicRenderingLocalReadFeatures const & other ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct.
    PhysicalDeviceDynamicRenderingLocalReadFeatures & operator=( VkPhysicalDeviceDynamicRenderingLocalReadFeatures const & other ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDynamicRenderingLocalReadFeatures const *>( &other );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingLocalReadFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingLocalReadFeatures &
      setDynamicRenderingLocalRead( Bool32 dynamicRenderingLocalRead_ ) VULKAN_HPP_NOEXCEPT
    {
      this->dynamicRenderingLocalRead = dynamicRenderingLocalRead_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer.
    operator VkPhysicalDeviceDynamicRenderingLocalReadFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDynamicRenderingLocalReadFeatures const *>( this );
    }

    operator VkPhysicalDeviceDynamicRenderingLocalReadFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDynamicRenderingLocalReadFeatures *>( this );
    }

    operator VkPhysicalDeviceDynamicRenderingLocalReadFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDynamicRenderingLocalReadFeatures const *>( this );
    }

    operator VkPhysicalDeviceDynamicRenderingLocalReadFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDynamicRenderingLocalReadFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used by the reflection-based comparisons.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, dynamicRenderingLocalRead );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDynamicRenderingLocalReadFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceDynamicRenderingLocalReadFeatures const & other ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == other.reflect();
#  else
      return ( sType == other.sType ) && ( pNext == other.pNext ) && ( dynamicRenderingLocalRead == other.dynamicRenderingLocalRead );
#  endif
    }

    bool operator!=( PhysicalDeviceDynamicRenderingLocalReadFeatures const & other ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == other );
    }
#endif

  public:
    StructureType sType                     = StructureType::ePhysicalDeviceDynamicRenderingLocalReadFeatures;
    void *        pNext                     = {};
    Bool32        dynamicRenderingLocalRead = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization mapping the C API struct to its C++ wrapper type (available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceDynamicRenderingLocalReadFeatures>
  {
    using Type = PhysicalDeviceDynamicRenderingLocalReadFeatures;
  };
#endif

  // Trait specialization mapping the sType enumerator to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDynamicRenderingLocalReadFeatures>
  {
    using Type = PhysicalDeviceDynamicRenderingLocalReadFeatures;
  };

  // Alias carrying the KHR suffix for the same struct.
  using PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR = PhysicalDeviceDynamicRenderingLocalReadFeatures;

  // wrapper struct for struct VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT.html
  struct PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( Bool32 dynamicRenderingUnusedAttachments_ = {},
                                                                                     void * pNext_                             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , dynamicRenderingUnusedAttachments{ dynamicRenderingUnusedAttachments_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT &
      operator=( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent native Vulkan struct; relies on this wrapper being
    // layout-identical to VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT.
    PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT &
      operator=( VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setter for the pNext extension-chain pointer.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    // Chainable setter for the dynamicRenderingUnusedAttachments feature flag.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT &
      setDynamicRenderingUnusedAttachments( Bool32 dynamicRenderingUnusedAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      dynamicRenderingUnusedAttachments = dynamicRenderingUnusedAttachments_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan struct, by const/non-const reference and
    // pointer; valid because wrapper and native type share the same member layout.
    operator VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns references to all members as a tuple, used below for comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, dynamicRenderingUnusedAttachments );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & ) const = default;
#else
    // Memberwise equality; note that pNext is compared as a raw pointer, not deeply.
    bool operator==( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicRenderingUnusedAttachments == rhs.dynamicRenderingUnusedAttachments );
#  endif
    }

    bool operator!=( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    // Member order and types mirror the native struct; the reinterpret_casts above rely on this.
  public:
    StructureType sType                             = StructureType::ePhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT;
    void *        pNext                             = {};
    Bool32        dynamicRenderingUnusedAttachments = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan struct type to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT>
  {
    using Type = PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant to the corresponding C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT>
  {
    using Type = PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceExclusiveScissorFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExclusiveScissorFeaturesNV.html
  struct PhysicalDeviceExclusiveScissorFeaturesNV
  {
    using NativeType = VkPhysicalDeviceExclusiveScissorFeaturesNV;

    // This structure type may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // All members default to zero/null; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( Bool32 exclusiveScissor_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , exclusiveScissor{ exclusiveScissor_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; safe because the two types are layout-identical.
    PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExclusiveScissorFeaturesNV( *reinterpret_cast<PhysicalDeviceExclusiveScissorFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceExclusiveScissorFeaturesNV & operator=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-identical reinterpretation).
    PhysicalDeviceExclusiveScissorFeaturesNV & operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceExclusiveScissorFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setter for the pNext extension-chain pointer.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    // Chainable setter for the exclusiveScissor feature flag.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV & setExclusiveScissor( Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT
    {
      exclusiveScissor = exclusiveScissor_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan struct, by const/non-const reference and
    // pointer; valid because wrapper and native type share the same member layout.
    operator VkPhysicalDeviceExclusiveScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExclusiveScissorFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExclusiveScissorFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceExclusiveScissorFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceExclusiveScissorFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceExclusiveScissorFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceExclusiveScissorFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns references to all members as a tuple, used below for comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, exclusiveScissor );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceExclusiveScissorFeaturesNV const & ) const = default;
#else
    // Memberwise equality; note that pNext is compared as a raw pointer, not deeply.
    bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exclusiveScissor == rhs.exclusiveScissor );
#  endif
    }

    bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    // Member order and types mirror the native struct; the reinterpret_casts above rely on this.
  public:
    StructureType sType            = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;
    void *        pNext            = {};
    Bool32        exclusiveScissor = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan struct type to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceExclusiveScissorFeaturesNV>
  {
    using Type = PhysicalDeviceExclusiveScissorFeaturesNV;
  };
#endif

  // Maps the StructureType enumerant to the corresponding C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV>
  {
    using Type = PhysicalDeviceExclusiveScissorFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceExtendedDynamicState2FeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExtendedDynamicState2FeaturesEXT.html
  struct PhysicalDeviceExtendedDynamicState2FeaturesEXT
  {
    using NativeType = VkPhysicalDeviceExtendedDynamicState2FeaturesEXT;

    // This structure type may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // All members default to zero/null; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT( Bool32 extendedDynamicState2_                   = {},
                                                                         Bool32 extendedDynamicState2LogicOp_            = {},
                                                                         Bool32 extendedDynamicState2PatchControlPoints_ = {},
                                                                         void * pNext_                                   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , extendedDynamicState2{ extendedDynamicState2_ }
      , extendedDynamicState2LogicOp{ extendedDynamicState2LogicOp_ }
      , extendedDynamicState2PatchControlPoints{ extendedDynamicState2PatchControlPoints_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceExtendedDynamicState2FeaturesEXT( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; safe because the two types are layout-identical.
    PhysicalDeviceExtendedDynamicState2FeaturesEXT( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExtendedDynamicState2FeaturesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicState2FeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceExtendedDynamicState2FeaturesEXT & operator=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-identical reinterpretation).
    PhysicalDeviceExtendedDynamicState2FeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceExtendedDynamicState2FeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setExtendedDynamicState2( Bool32 extendedDynamicState2_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState2 = extendedDynamicState2_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT &
      setExtendedDynamicState2LogicOp( Bool32 extendedDynamicState2LogicOp_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState2LogicOp = extendedDynamicState2LogicOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT &
      setExtendedDynamicState2PatchControlPoints( Bool32 extendedDynamicState2PatchControlPoints_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState2PatchControlPoints = extendedDynamicState2PatchControlPoints_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan struct, by const/non-const reference and
    // pointer; valid because wrapper and native type share the same member layout.
    operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState2FeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicState2FeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState2FeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceExtendedDynamicState2FeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns references to all members as a tuple, used below for comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, extendedDynamicState2, extendedDynamicState2LogicOp, extendedDynamicState2PatchControlPoints );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & ) const = default;
#else
    // Memberwise equality; note that pNext is compared as a raw pointer, not deeply.
    bool operator==( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedDynamicState2 == rhs.extendedDynamicState2 ) &&
             ( extendedDynamicState2LogicOp == rhs.extendedDynamicState2LogicOp ) &&
             ( extendedDynamicState2PatchControlPoints == rhs.extendedDynamicState2PatchControlPoints );
#  endif
    }

    bool operator!=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    // Member order and types mirror the native struct; the reinterpret_casts above rely on this.
  public:
    StructureType sType                                   = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT;
    void *        pNext                                   = {};
    Bool32        extendedDynamicState2                   = {};
    Bool32        extendedDynamicState2LogicOp            = {};
    Bool32        extendedDynamicState2PatchControlPoints = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan struct type to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceExtendedDynamicState2FeaturesEXT>
  {
    using Type = PhysicalDeviceExtendedDynamicState2FeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant to the corresponding C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT>
  {
    using Type = PhysicalDeviceExtendedDynamicState2FeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceExtendedDynamicState3FeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExtendedDynamicState3FeaturesEXT.html
  struct PhysicalDeviceExtendedDynamicState3FeaturesEXT
  {
    using NativeType = VkPhysicalDeviceExtendedDynamicState3FeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // One Bool32 per dynamic-state-3 capability; all default to VK_FALSE (zero), and the
    // initializer list below mirrors the declaration order of the data members.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState3FeaturesEXT( Bool32 extendedDynamicState3TessellationDomainOrigin_         = {},
                                                                         Bool32 extendedDynamicState3DepthClampEnable_                 = {},
                                                                         Bool32 extendedDynamicState3PolygonMode_                      = {},
                                                                         Bool32 extendedDynamicState3RasterizationSamples_             = {},
                                                                         Bool32 extendedDynamicState3SampleMask_                       = {},
                                                                         Bool32 extendedDynamicState3AlphaToCoverageEnable_            = {},
                                                                         Bool32 extendedDynamicState3AlphaToOneEnable_                 = {},
                                                                         Bool32 extendedDynamicState3LogicOpEnable_                    = {},
                                                                         Bool32 extendedDynamicState3ColorBlendEnable_                 = {},
                                                                         Bool32 extendedDynamicState3ColorBlendEquation_               = {},
                                                                         Bool32 extendedDynamicState3ColorWriteMask_                   = {},
                                                                         Bool32 extendedDynamicState3RasterizationStream_              = {},
                                                                         Bool32 extendedDynamicState3ConservativeRasterizationMode_    = {},
                                                                         Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize_ = {},
                                                                         Bool32 extendedDynamicState3DepthClipEnable_                  = {},
                                                                         Bool32 extendedDynamicState3SampleLocationsEnable_            = {},
                                                                         Bool32 extendedDynamicState3ColorBlendAdvanced_               = {},
                                                                         Bool32 extendedDynamicState3ProvokingVertexMode_              = {},
                                                                         Bool32 extendedDynamicState3LineRasterizationMode_            = {},
                                                                         Bool32 extendedDynamicState3LineStippleEnable_                = {},
                                                                         Bool32 extendedDynamicState3DepthClipNegativeOneToOne_        = {},
                                                                         Bool32 extendedDynamicState3ViewportWScalingEnable_           = {},
                                                                         Bool32 extendedDynamicState3ViewportSwizzle_                  = {},
                                                                         Bool32 extendedDynamicState3CoverageToColorEnable_            = {},
                                                                         Bool32 extendedDynamicState3CoverageToColorLocation_          = {},
                                                                         Bool32 extendedDynamicState3CoverageModulationMode_           = {},
                                                                         Bool32 extendedDynamicState3CoverageModulationTableEnable_    = {},
                                                                         Bool32 extendedDynamicState3CoverageModulationTable_          = {},
                                                                         Bool32 extendedDynamicState3CoverageReductionMode_            = {},
                                                                         Bool32 extendedDynamicState3RepresentativeFragmentTestEnable_ = {},
                                                                         Bool32 extendedDynamicState3ShadingRateImageEnable_           = {},
                                                                         void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , extendedDynamicState3TessellationDomainOrigin{ extendedDynamicState3TessellationDomainOrigin_ }
      , extendedDynamicState3DepthClampEnable{ extendedDynamicState3DepthClampEnable_ }
      , extendedDynamicState3PolygonMode{ extendedDynamicState3PolygonMode_ }
      , extendedDynamicState3RasterizationSamples{ extendedDynamicState3RasterizationSamples_ }
      , extendedDynamicState3SampleMask{ extendedDynamicState3SampleMask_ }
      , extendedDynamicState3AlphaToCoverageEnable{ extendedDynamicState3AlphaToCoverageEnable_ }
      , extendedDynamicState3AlphaToOneEnable{ extendedDynamicState3AlphaToOneEnable_ }
      , extendedDynamicState3LogicOpEnable{ extendedDynamicState3LogicOpEnable_ }
      , extendedDynamicState3ColorBlendEnable{ extendedDynamicState3ColorBlendEnable_ }
      , extendedDynamicState3ColorBlendEquation{ extendedDynamicState3ColorBlendEquation_ }
      , extendedDynamicState3ColorWriteMask{ extendedDynamicState3ColorWriteMask_ }
      , extendedDynamicState3RasterizationStream{ extendedDynamicState3RasterizationStream_ }
      , extendedDynamicState3ConservativeRasterizationMode{ extendedDynamicState3ConservativeRasterizationMode_ }
      , extendedDynamicState3ExtraPrimitiveOverestimationSize{ extendedDynamicState3ExtraPrimitiveOverestimationSize_ }
      , extendedDynamicState3DepthClipEnable{ extendedDynamicState3DepthClipEnable_ }
      , extendedDynamicState3SampleLocationsEnable{ extendedDynamicState3SampleLocationsEnable_ }
      , extendedDynamicState3ColorBlendAdvanced{ extendedDynamicState3ColorBlendAdvanced_ }
      , extendedDynamicState3ProvokingVertexMode{ extendedDynamicState3ProvokingVertexMode_ }
      , extendedDynamicState3LineRasterizationMode{ extendedDynamicState3LineRasterizationMode_ }
      , extendedDynamicState3LineStippleEnable{ extendedDynamicState3LineStippleEnable_ }
      , extendedDynamicState3DepthClipNegativeOneToOne{ extendedDynamicState3DepthClipNegativeOneToOne_ }
      , extendedDynamicState3ViewportWScalingEnable{ extendedDynamicState3ViewportWScalingEnable_ }
      , extendedDynamicState3ViewportSwizzle{ extendedDynamicState3ViewportSwizzle_ }
      , extendedDynamicState3CoverageToColorEnable{ extendedDynamicState3CoverageToColorEnable_ }
      , extendedDynamicState3CoverageToColorLocation{ extendedDynamicState3CoverageToColorLocation_ }
      , extendedDynamicState3CoverageModulationMode{ extendedDynamicState3CoverageModulationMode_ }
      , extendedDynamicState3CoverageModulationTableEnable{ extendedDynamicState3CoverageModulationTableEnable_ }
      , extendedDynamicState3CoverageModulationTable{ extendedDynamicState3CoverageModulationTable_ }
      , extendedDynamicState3CoverageReductionMode{ extendedDynamicState3CoverageReductionMode_ }
      , extendedDynamicState3RepresentativeFragmentTestEnable{ extendedDynamicState3RepresentativeFragmentTestEnable_ }
      , extendedDynamicState3ShadingRateImageEnable{ extendedDynamicState3ShadingRateImageEnable_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceExtendedDynamicState3FeaturesEXT( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; safe because the two types are layout-identical.
    PhysicalDeviceExtendedDynamicState3FeaturesEXT( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExtendedDynamicState3FeaturesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicState3FeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceExtendedDynamicState3FeaturesEXT & operator=( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-identical reinterpretation).
    PhysicalDeviceExtendedDynamicState3FeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceExtendedDynamicState3FeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns exactly one member and returns *this, allowing
    // fluent construction; they appear in the declaration order of the data members.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3TessellationDomainOrigin( Bool32 extendedDynamicState3TessellationDomainOrigin_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3TessellationDomainOrigin = extendedDynamicState3TessellationDomainOrigin_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3DepthClampEnable( Bool32 extendedDynamicState3DepthClampEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3DepthClampEnable = extendedDynamicState3DepthClampEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3PolygonMode( Bool32 extendedDynamicState3PolygonMode_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3PolygonMode = extendedDynamicState3PolygonMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3RasterizationSamples( Bool32 extendedDynamicState3RasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3RasterizationSamples = extendedDynamicState3RasterizationSamples_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3SampleMask( Bool32 extendedDynamicState3SampleMask_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3SampleMask = extendedDynamicState3SampleMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3AlphaToCoverageEnable( Bool32 extendedDynamicState3AlphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3AlphaToCoverageEnable = extendedDynamicState3AlphaToCoverageEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3AlphaToOneEnable( Bool32 extendedDynamicState3AlphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3AlphaToOneEnable = extendedDynamicState3AlphaToOneEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3LogicOpEnable( Bool32 extendedDynamicState3LogicOpEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3LogicOpEnable = extendedDynamicState3LogicOpEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3ColorBlendEnable( Bool32 extendedDynamicState3ColorBlendEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3ColorBlendEnable = extendedDynamicState3ColorBlendEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3ColorBlendEquation( Bool32 extendedDynamicState3ColorBlendEquation_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3ColorBlendEquation = extendedDynamicState3ColorBlendEquation_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3ColorWriteMask( Bool32 extendedDynamicState3ColorWriteMask_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3ColorWriteMask = extendedDynamicState3ColorWriteMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3RasterizationStream( Bool32 extendedDynamicState3RasterizationStream_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3RasterizationStream = extendedDynamicState3RasterizationStream_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3ConservativeRasterizationMode( Bool32 extendedDynamicState3ConservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3ConservativeRasterizationMode = extendedDynamicState3ConservativeRasterizationMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3ExtraPrimitiveOverestimationSize( Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3ExtraPrimitiveOverestimationSize = extendedDynamicState3ExtraPrimitiveOverestimationSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3DepthClipEnable( Bool32 extendedDynamicState3DepthClipEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3DepthClipEnable = extendedDynamicState3DepthClipEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3SampleLocationsEnable( Bool32 extendedDynamicState3SampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3SampleLocationsEnable = extendedDynamicState3SampleLocationsEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3ColorBlendAdvanced( Bool32 extendedDynamicState3ColorBlendAdvanced_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3ColorBlendAdvanced = extendedDynamicState3ColorBlendAdvanced_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3ProvokingVertexMode( Bool32 extendedDynamicState3ProvokingVertexMode_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3ProvokingVertexMode = extendedDynamicState3ProvokingVertexMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3LineRasterizationMode( Bool32 extendedDynamicState3LineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3LineRasterizationMode = extendedDynamicState3LineRasterizationMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3LineStippleEnable( Bool32 extendedDynamicState3LineStippleEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3LineStippleEnable = extendedDynamicState3LineStippleEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3DepthClipNegativeOneToOne( Bool32 extendedDynamicState3DepthClipNegativeOneToOne_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3DepthClipNegativeOneToOne = extendedDynamicState3DepthClipNegativeOneToOne_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3ViewportWScalingEnable( Bool32 extendedDynamicState3ViewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3ViewportWScalingEnable = extendedDynamicState3ViewportWScalingEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3ViewportSwizzle( Bool32 extendedDynamicState3ViewportSwizzle_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3ViewportSwizzle = extendedDynamicState3ViewportSwizzle_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3CoverageToColorEnable( Bool32 extendedDynamicState3CoverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3CoverageToColorEnable = extendedDynamicState3CoverageToColorEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3CoverageToColorLocation( Bool32 extendedDynamicState3CoverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3CoverageToColorLocation = extendedDynamicState3CoverageToColorLocation_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3CoverageModulationMode( Bool32 extendedDynamicState3CoverageModulationMode_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3CoverageModulationMode = extendedDynamicState3CoverageModulationMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3CoverageModulationTableEnable( Bool32 extendedDynamicState3CoverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3CoverageModulationTableEnable = extendedDynamicState3CoverageModulationTableEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3CoverageModulationTable( Bool32 extendedDynamicState3CoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3CoverageModulationTable = extendedDynamicState3CoverageModulationTable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3CoverageReductionMode( Bool32 extendedDynamicState3CoverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3CoverageReductionMode = extendedDynamicState3CoverageReductionMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3RepresentativeFragmentTestEnable( Bool32 extendedDynamicState3RepresentativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3RepresentativeFragmentTestEnable = extendedDynamicState3RepresentativeFragmentTestEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT &
      setExtendedDynamicState3ShadingRateImageEnable( Bool32 extendedDynamicState3ShadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState3ShadingRateImageEnable = extendedDynamicState3ShadingRateImageEnable_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type. The wrapper is generated to be
    // layout-compatible with VkPhysicalDeviceExtendedDynamicState3FeaturesEXT, so a
    // reinterpret_cast of 'this' suffices.
    operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState3FeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicState3FeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState3FeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceExtendedDynamicState3FeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns const references to every member, in declaration order; the reflection-based
    // operator== below compares structs via this tuple. The tuple type list must stay in
    // exact one-to-one correspondence with the std::tie argument list.
    std::tuple<StructureType const &,
               void * const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       extendedDynamicState3TessellationDomainOrigin,
                       extendedDynamicState3DepthClampEnable,
                       extendedDynamicState3PolygonMode,
                       extendedDynamicState3RasterizationSamples,
                       extendedDynamicState3SampleMask,
                       extendedDynamicState3AlphaToCoverageEnable,
                       extendedDynamicState3AlphaToOneEnable,
                       extendedDynamicState3LogicOpEnable,
                       extendedDynamicState3ColorBlendEnable,
                       extendedDynamicState3ColorBlendEquation,
                       extendedDynamicState3ColorWriteMask,
                       extendedDynamicState3RasterizationStream,
                       extendedDynamicState3ConservativeRasterizationMode,
                       extendedDynamicState3ExtraPrimitiveOverestimationSize,
                       extendedDynamicState3DepthClipEnable,
                       extendedDynamicState3SampleLocationsEnable,
                       extendedDynamicState3ColorBlendAdvanced,
                       extendedDynamicState3ProvokingVertexMode,
                       extendedDynamicState3LineRasterizationMode,
                       extendedDynamicState3LineStippleEnable,
                       extendedDynamicState3DepthClipNegativeOneToOne,
                       extendedDynamicState3ViewportWScalingEnable,
                       extendedDynamicState3ViewportSwizzle,
                       extendedDynamicState3CoverageToColorEnable,
                       extendedDynamicState3CoverageToColorLocation,
                       extendedDynamicState3CoverageModulationMode,
                       extendedDynamicState3CoverageModulationTableEnable,
                       extendedDynamicState3CoverageModulationTable,
                       extendedDynamicState3CoverageReductionMode,
                       extendedDynamicState3RepresentativeFragmentTestEnable,
                       extendedDynamicState3ShadingRateImageEnable );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20 path: defaulted three-way comparison generates ==, !=, <, <=, >, >=.
    auto operator<=>( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & ) const = default;
#else
    // Memberwise equality; note that pNext is compared as a raw pointer value, not deeply.
    bool operator==( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( extendedDynamicState3TessellationDomainOrigin == rhs.extendedDynamicState3TessellationDomainOrigin ) &&
             ( extendedDynamicState3DepthClampEnable == rhs.extendedDynamicState3DepthClampEnable ) &&
             ( extendedDynamicState3PolygonMode == rhs.extendedDynamicState3PolygonMode ) &&
             ( extendedDynamicState3RasterizationSamples == rhs.extendedDynamicState3RasterizationSamples ) &&
             ( extendedDynamicState3SampleMask == rhs.extendedDynamicState3SampleMask ) &&
             ( extendedDynamicState3AlphaToCoverageEnable == rhs.extendedDynamicState3AlphaToCoverageEnable ) &&
             ( extendedDynamicState3AlphaToOneEnable == rhs.extendedDynamicState3AlphaToOneEnable ) &&
             ( extendedDynamicState3LogicOpEnable == rhs.extendedDynamicState3LogicOpEnable ) &&
             ( extendedDynamicState3ColorBlendEnable == rhs.extendedDynamicState3ColorBlendEnable ) &&
             ( extendedDynamicState3ColorBlendEquation == rhs.extendedDynamicState3ColorBlendEquation ) &&
             ( extendedDynamicState3ColorWriteMask == rhs.extendedDynamicState3ColorWriteMask ) &&
             ( extendedDynamicState3RasterizationStream == rhs.extendedDynamicState3RasterizationStream ) &&
             ( extendedDynamicState3ConservativeRasterizationMode == rhs.extendedDynamicState3ConservativeRasterizationMode ) &&
             ( extendedDynamicState3ExtraPrimitiveOverestimationSize == rhs.extendedDynamicState3ExtraPrimitiveOverestimationSize ) &&
             ( extendedDynamicState3DepthClipEnable == rhs.extendedDynamicState3DepthClipEnable ) &&
             ( extendedDynamicState3SampleLocationsEnable == rhs.extendedDynamicState3SampleLocationsEnable ) &&
             ( extendedDynamicState3ColorBlendAdvanced == rhs.extendedDynamicState3ColorBlendAdvanced ) &&
             ( extendedDynamicState3ProvokingVertexMode == rhs.extendedDynamicState3ProvokingVertexMode ) &&
             ( extendedDynamicState3LineRasterizationMode == rhs.extendedDynamicState3LineRasterizationMode ) &&
             ( extendedDynamicState3LineStippleEnable == rhs.extendedDynamicState3LineStippleEnable ) &&
             ( extendedDynamicState3DepthClipNegativeOneToOne == rhs.extendedDynamicState3DepthClipNegativeOneToOne ) &&
             ( extendedDynamicState3ViewportWScalingEnable == rhs.extendedDynamicState3ViewportWScalingEnable ) &&
             ( extendedDynamicState3ViewportSwizzle == rhs.extendedDynamicState3ViewportSwizzle ) &&
             ( extendedDynamicState3CoverageToColorEnable == rhs.extendedDynamicState3CoverageToColorEnable ) &&
             ( extendedDynamicState3CoverageToColorLocation == rhs.extendedDynamicState3CoverageToColorLocation ) &&
             ( extendedDynamicState3CoverageModulationMode == rhs.extendedDynamicState3CoverageModulationMode ) &&
             ( extendedDynamicState3CoverageModulationTableEnable == rhs.extendedDynamicState3CoverageModulationTableEnable ) &&
             ( extendedDynamicState3CoverageModulationTable == rhs.extendedDynamicState3CoverageModulationTable ) &&
             ( extendedDynamicState3CoverageReductionMode == rhs.extendedDynamicState3CoverageReductionMode ) &&
             ( extendedDynamicState3RepresentativeFragmentTestEnable == rhs.extendedDynamicState3RepresentativeFragmentTestEnable ) &&
             ( extendedDynamicState3ShadingRateImageEnable == rhs.extendedDynamicState3ShadingRateImageEnable );
#  endif
    }

    bool operator!=( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType identifies this struct when it appears in a pNext chain; keep the default value.
    StructureType sType                                                 = StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT;
    void *        pNext                                                 = {};
    // One VkBool32 per dynamic-state capability of VK_EXT_extended_dynamic_state3;
    // declaration order must match the native C struct and the reflect()/operator== lists.
    Bool32        extendedDynamicState3TessellationDomainOrigin         = {};
    Bool32        extendedDynamicState3DepthClampEnable                 = {};
    Bool32        extendedDynamicState3PolygonMode                      = {};
    Bool32        extendedDynamicState3RasterizationSamples             = {};
    Bool32        extendedDynamicState3SampleMask                       = {};
    Bool32        extendedDynamicState3AlphaToCoverageEnable            = {};
    Bool32        extendedDynamicState3AlphaToOneEnable                 = {};
    Bool32        extendedDynamicState3LogicOpEnable                    = {};
    Bool32        extendedDynamicState3ColorBlendEnable                 = {};
    Bool32        extendedDynamicState3ColorBlendEquation               = {};
    Bool32        extendedDynamicState3ColorWriteMask                   = {};
    Bool32        extendedDynamicState3RasterizationStream              = {};
    Bool32        extendedDynamicState3ConservativeRasterizationMode    = {};
    Bool32        extendedDynamicState3ExtraPrimitiveOverestimationSize = {};
    Bool32        extendedDynamicState3DepthClipEnable                  = {};
    Bool32        extendedDynamicState3SampleLocationsEnable            = {};
    Bool32        extendedDynamicState3ColorBlendAdvanced               = {};
    Bool32        extendedDynamicState3ProvokingVertexMode              = {};
    Bool32        extendedDynamicState3LineRasterizationMode            = {};
    Bool32        extendedDynamicState3LineStippleEnable                = {};
    Bool32        extendedDynamicState3DepthClipNegativeOneToOne        = {};
    Bool32        extendedDynamicState3ViewportWScalingEnable           = {};
    Bool32        extendedDynamicState3ViewportSwizzle                  = {};
    Bool32        extendedDynamicState3CoverageToColorEnable            = {};
    Bool32        extendedDynamicState3CoverageToColorLocation          = {};
    Bool32        extendedDynamicState3CoverageModulationMode           = {};
    Bool32        extendedDynamicState3CoverageModulationTableEnable    = {};
    Bool32        extendedDynamicState3CoverageModulationTable          = {};
    Bool32        extendedDynamicState3CoverageReductionMode            = {};
    Bool32        extendedDynamicState3RepresentativeFragmentTestEnable = {};
    Bool32        extendedDynamicState3ShadingRateImageEnable           = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper (used by C++20-only helpers).
  template <>
  struct CppType<VkPhysicalDeviceExtendedDynamicState3FeaturesEXT>
  {
    using Type = PhysicalDeviceExtendedDynamicState3FeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT>
  {
    using Type = PhysicalDeviceExtendedDynamicState3FeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceExtendedDynamicState3PropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExtendedDynamicState3PropertiesEXT.html
  struct PhysicalDeviceExtendedDynamicState3PropertiesEXT
  {
    using NativeType = VkPhysicalDeviceExtendedDynamicState3PropertiesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: takes the single property field plus an optional pNext chain pointer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState3PropertiesEXT( Bool32 dynamicPrimitiveTopologyUnrestricted_ = {},
                                                                           void * pNext_                                = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , dynamicPrimitiveTopologyUnrestricted{ dynamicPrimitiveTopologyUnrestricted_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceExtendedDynamicState3PropertiesEXT( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the wrapper is generated to be layout-compatible,
    // so delegating through a reinterpret_cast view is sufficient.
    PhysicalDeviceExtendedDynamicState3PropertiesEXT( VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExtendedDynamicState3PropertiesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicState3PropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceExtendedDynamicState3PropertiesEXT & operator=( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct, again via the layout-compatible wrapper view.
    PhysicalDeviceExtendedDynamicState3PropertiesEXT & operator=( VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceExtendedDynamicState3PropertiesEXT const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native type: pointer forms first, then reference forms.
    operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState3PropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceExtendedDynamicState3PropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState3PropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicState3PropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of const references, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, dynamicPrimitiveTopologyUnrestricted );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( dynamicPrimitiveTopologyUnrestricted == rhs.dynamicPrimitiveTopologyUnrestricted );
#  endif
    }

    bool operator!=( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType                                = StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT;
    void *        pNext                                = {};
    Bool32        dynamicPrimitiveTopologyUnrestricted = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper (used by C++20-only helpers).
  template <>
  struct CppType<VkPhysicalDeviceExtendedDynamicState3PropertiesEXT>
  {
    using Type = PhysicalDeviceExtendedDynamicState3PropertiesEXT;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT>
  {
    using Type = PhysicalDeviceExtendedDynamicState3PropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceExtendedDynamicStateFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExtendedDynamicStateFeaturesEXT.html
  struct PhysicalDeviceExtendedDynamicStateFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceExtendedDynamicStateFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: takes the single feature flag plus an optional pNext chain pointer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT( Bool32 extendedDynamicState_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , extendedDynamicState{ extendedDynamicState_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceExtendedDynamicStateFeaturesEXT( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct via the layout-compatible wrapper view.
    PhysicalDeviceExtendedDynamicStateFeaturesEXT( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExtendedDynamicStateFeaturesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicStateFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct.
    PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceExtendedDynamicStateFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters returning *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & setExtendedDynamicState( Bool32 extendedDynamicState_ ) VULKAN_HPP_NOEXCEPT
    {
      this->extendedDynamicState = extendedDynamicState_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type: pointer forms first, then reference forms.
    operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceExtendedDynamicStateFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicStateFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of const references, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, extendedDynamicState );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedDynamicState == rhs.extendedDynamicState );
#  endif
    }

    bool operator!=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType                = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT;
    void *        pNext                = {};
    Bool32        extendedDynamicState = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper (used by C++20-only helpers).
  template <>
  struct CppType<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT>
  {
    using Type = PhysicalDeviceExtendedDynamicStateFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT>
  {
    using Type = PhysicalDeviceExtendedDynamicStateFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV.html
  struct PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV
  {
    using NativeType = VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceExtendedSparseAddressSpaceFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: takes the single feature flag plus an optional pNext chain pointer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV( Bool32 extendedSparseAddressSpace_ = {},
                                                                             void * pNext_                      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , extendedSparseAddressSpace{ extendedSparseAddressSpace_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct via the layout-compatible wrapper view.
    PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV( VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV( *reinterpret_cast<PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV &
      operator=( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct.
    PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV & operator=( VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters returning *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV &
      setExtendedSparseAddressSpace( Bool32 extendedSparseAddressSpace_ ) VULKAN_HPP_NOEXCEPT
    {
      this->extendedSparseAddressSpace = extendedSparseAddressSpace_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type: pointer forms first, then reference forms.
    operator VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of const references, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, extendedSparseAddressSpace );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedSparseAddressSpace == rhs.extendedSparseAddressSpace );
#  endif
    }

    bool operator!=( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType                      = StructureType::ePhysicalDeviceExtendedSparseAddressSpaceFeaturesNV;
    void *        pNext                      = {};
    Bool32        extendedSparseAddressSpace = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper (used by C++20-only helpers).
  template <>
  struct CppType<VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV>
  {
    using Type = PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedSparseAddressSpaceFeaturesNV>
  {
    using Type = PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV.html
  struct PhysicalDeviceExtendedSparseAddressSpacePropertiesNV
  {
    using NativeType = VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceExtendedSparseAddressSpacePropertiesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: takes the three property fields plus an optional pNext chain pointer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedSparseAddressSpacePropertiesNV( DeviceSize       extendedSparseAddressSpaceSize_ = {},
                                                                               ImageUsageFlags  extendedSparseImageUsageFlags_  = {},
                                                                               BufferUsageFlags extendedSparseBufferUsageFlags_ = {},
                                                                               void *           pNext_                          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , extendedSparseAddressSpaceSize{ extendedSparseAddressSpaceSize_ }
      , extendedSparseImageUsageFlags{ extendedSparseImageUsageFlags_ }
      , extendedSparseBufferUsageFlags{ extendedSparseBufferUsageFlags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceExtendedSparseAddressSpacePropertiesNV( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct via the layout-compatible wrapper view.
    PhysicalDeviceExtendedSparseAddressSpacePropertiesNV( VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExtendedSparseAddressSpacePropertiesNV( *reinterpret_cast<PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceExtendedSparseAddressSpacePropertiesNV &
      operator=( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct.
    PhysicalDeviceExtendedSparseAddressSpacePropertiesNV & operator=( VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native type: pointer forms first, then reference forms.
    operator VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV *>( this );
    }

    operator VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV *>( this );
    }

    operator VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV *>( this );
    }

    operator VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of const references, in declaration order.
    std::tuple<StructureType const &, void * const &, DeviceSize const &, ImageUsageFlags const &, BufferUsageFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, extendedSparseAddressSpaceSize, extendedSparseImageUsageFlags, extendedSparseBufferUsageFlags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( extendedSparseAddressSpaceSize == rhs.extendedSparseAddressSpaceSize ) &&
             ( extendedSparseImageUsageFlags == rhs.extendedSparseImageUsageFlags ) &&
             ( extendedSparseBufferUsageFlags == rhs.extendedSparseBufferUsageFlags );
#  endif
    }

    bool operator!=( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType    sType                          = StructureType::ePhysicalDeviceExtendedSparseAddressSpacePropertiesNV;
    void *           pNext                          = {};
    DeviceSize       extendedSparseAddressSpaceSize = {};
    ImageUsageFlags  extendedSparseImageUsageFlags  = {};
    BufferUsageFlags extendedSparseBufferUsageFlags = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper (used by C++20-only helpers).
  template <>
  struct CppType<VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV>
  {
    using Type = PhysicalDeviceExtendedSparseAddressSpacePropertiesNV;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedSparseAddressSpacePropertiesNV>
  {
    using Type = PhysicalDeviceExtendedSparseAddressSpacePropertiesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceExternalBufferInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalBufferInfo.html
  struct PhysicalDeviceExternalBufferInfo
  {
    using NativeType = VkPhysicalDeviceExternalBufferInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceExternalBufferInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( BufferCreateFlags                flags_      = {},
                                                           BufferUsageFlags                 usage_      = {},
                                                           ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
                                                           const void *                     pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , usage{ usage_ }
      , handleType{ handleType_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExternalBufferInfo( *reinterpret_cast<PhysicalDeviceExternalBufferInfo const *>( &rhs ) )
    {
    }

    PhysicalDeviceExternalBufferInfo & operator=( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceExternalBufferInfo & operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceExternalBufferInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setFlags( BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setUsage( BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPhysicalDeviceExternalBufferInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( this );
    }

    operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExternalBufferInfo *>( this );
    }

    operator VkPhysicalDeviceExternalBufferInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( this );
    }

    operator VkPhysicalDeviceExternalBufferInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceExternalBufferInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, BufferCreateFlags const &, BufferUsageFlags const &, ExternalMemoryHandleTypeFlagBits const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, usage, handleType );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceExternalBufferInfo const & ) const = default;
#else
    bool operator==( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) && ( handleType == rhs.handleType );
#  endif
    }

    bool operator!=( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                    sType      = StructureType::ePhysicalDeviceExternalBufferInfo;
    const void *                     pNext      = {};
    BufferCreateFlags                flags      = {};
    BufferUsageFlags                 usage      = {};
    ExternalMemoryHandleTypeFlagBits handleType = ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceExternalBufferInfo>
  {
    using Type = PhysicalDeviceExternalBufferInfo;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalBufferInfo>
  {
    using Type = PhysicalDeviceExternalBufferInfo;
  };

  using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo;

  // wrapper struct for struct VkPhysicalDeviceExternalComputeQueuePropertiesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalComputeQueuePropertiesNV.html
  struct PhysicalDeviceExternalComputeQueuePropertiesNV
  {
    using NativeType = VkPhysicalDeviceExternalComputeQueuePropertiesNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceExternalComputeQueuePropertiesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalComputeQueuePropertiesNV( uint32_t externalDataSize_  = {},
                                                                         uint32_t maxExternalQueues_ = {},
                                                                         void *   pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , externalDataSize{ externalDataSize_ }
      , maxExternalQueues{ maxExternalQueues_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceExternalComputeQueuePropertiesNV( PhysicalDeviceExternalComputeQueuePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceExternalComputeQueuePropertiesNV( VkPhysicalDeviceExternalComputeQueuePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExternalComputeQueuePropertiesNV( *reinterpret_cast<PhysicalDeviceExternalComputeQueuePropertiesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceExternalComputeQueuePropertiesNV & operator=( PhysicalDeviceExternalComputeQueuePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceExternalComputeQueuePropertiesNV & operator=( VkPhysicalDeviceExternalComputeQueuePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceExternalComputeQueuePropertiesNV const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceExternalComputeQueuePropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExternalComputeQueuePropertiesNV *>( this );
    }

    operator VkPhysicalDeviceExternalComputeQueuePropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExternalComputeQueuePropertiesNV *>( this );
    }

    operator VkPhysicalDeviceExternalComputeQueuePropertiesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceExternalComputeQueuePropertiesNV *>( this );
    }

    operator VkPhysicalDeviceExternalComputeQueuePropertiesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceExternalComputeQueuePropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, externalDataSize, maxExternalQueues );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceExternalComputeQueuePropertiesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceExternalComputeQueuePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalDataSize == rhs.externalDataSize ) && ( maxExternalQueues == rhs.maxExternalQueues );
#  endif
    }

    bool operator!=( PhysicalDeviceExternalComputeQueuePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType             = StructureType::ePhysicalDeviceExternalComputeQueuePropertiesNV;
    void *        pNext             = {};
    uint32_t      externalDataSize  = {};
    uint32_t      maxExternalQueues = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceExternalComputeQueuePropertiesNV>
  {
    using Type = PhysicalDeviceExternalComputeQueuePropertiesNV;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalComputeQueuePropertiesNV>
  {
    using Type = PhysicalDeviceExternalComputeQueuePropertiesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceExternalFenceInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalFenceInfo.html
  struct PhysicalDeviceExternalFenceInfo
  {
    using NativeType = VkPhysicalDeviceExternalFenceInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceExternalFenceInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd,
                                                          const void *                    pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , handleType{ handleType_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExternalFenceInfo( *reinterpret_cast<PhysicalDeviceExternalFenceInfo const *>( &rhs ) )
    {
    }

    PhysicalDeviceExternalFenceInfo & operator=( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceExternalFenceInfo & operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceExternalFenceInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo & setHandleType( ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPhysicalDeviceExternalFenceInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( this );
    }

    operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExternalFenceInfo *>( this );
    }

    operator VkPhysicalDeviceExternalFenceInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( this );
    }

    operator VkPhysicalDeviceExternalFenceInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceExternalFenceInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, ExternalFenceHandleTypeFlagBits const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, handleType );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceExternalFenceInfo const & ) const = default;
#else
    bool operator==( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType );
#  endif
    }

    bool operator!=( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                   sType      = StructureType::ePhysicalDeviceExternalFenceInfo;
    const void *                    pNext      = {};
    ExternalFenceHandleTypeFlagBits handleType = ExternalFenceHandleTypeFlagBits::eOpaqueFd;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceExternalFenceInfo>
  {
    using Type = PhysicalDeviceExternalFenceInfo;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalFenceInfo>
  {
    using Type = PhysicalDeviceExternalFenceInfo;
  };

  using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo;

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  // wrapper struct for struct VkPhysicalDeviceExternalFormatResolveFeaturesANDROID, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalFormatResolveFeaturesANDROID.html
  struct PhysicalDeviceExternalFormatResolveFeaturesANDROID
  {
    using NativeType = VkPhysicalDeviceExternalFormatResolveFeaturesANDROID;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceExternalFormatResolveFeaturesANDROID;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFormatResolveFeaturesANDROID( Bool32 externalFormatResolve_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , externalFormatResolve{ externalFormatResolve_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceExternalFormatResolveFeaturesANDROID( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceExternalFormatResolveFeaturesANDROID( VkPhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExternalFormatResolveFeaturesANDROID( *reinterpret_cast<PhysicalDeviceExternalFormatResolveFeaturesANDROID const *>( &rhs ) )
    {
    }

    PhysicalDeviceExternalFormatResolveFeaturesANDROID &
      operator=( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceExternalFormatResolveFeaturesANDROID & operator=( VkPhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceExternalFormatResolveFeaturesANDROID const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFormatResolveFeaturesANDROID & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFormatResolveFeaturesANDROID & setExternalFormatResolve( Bool32 externalFormatResolve_ ) VULKAN_HPP_NOEXCEPT
    {
      externalFormatResolve = externalFormatResolve_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPhysicalDeviceExternalFormatResolveFeaturesANDROID const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExternalFormatResolveFeaturesANDROID *>( this );
    }

    operator VkPhysicalDeviceExternalFormatResolveFeaturesANDROID &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExternalFormatResolveFeaturesANDROID *>( this );
    }

    operator VkPhysicalDeviceExternalFormatResolveFeaturesANDROID const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceExternalFormatResolveFeaturesANDROID *>( this );
    }

    operator VkPhysicalDeviceExternalFormatResolveFeaturesANDROID *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceExternalFormatResolveFeaturesANDROID *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, externalFormatResolve );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & ) const = default;
#  else
    bool operator==( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormatResolve == rhs.externalFormatResolve );
#    endif
    }

    bool operator!=( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    StructureType sType                 = StructureType::ePhysicalDeviceExternalFormatResolveFeaturesANDROID;
    void *        pNext                 = {};
    Bool32        externalFormatResolve = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceExternalFormatResolveFeaturesANDROID>
  {
    using Type = PhysicalDeviceExternalFormatResolveFeaturesANDROID;
  };
#  endif

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalFormatResolveFeaturesANDROID>
  {
    using Type = PhysicalDeviceExternalFormatResolveFeaturesANDROID;
  };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  // wrapper struct for struct VkPhysicalDeviceExternalFormatResolvePropertiesANDROID, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalFormatResolvePropertiesANDROID.html
  struct PhysicalDeviceExternalFormatResolvePropertiesANDROID
  {
    using NativeType = VkPhysicalDeviceExternalFormatResolvePropertiesANDROID;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceExternalFormatResolvePropertiesANDROID;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceExternalFormatResolvePropertiesANDROID( Bool32         nullColorAttachmentWithExternalFormatResolve_ = {},
                                                            ChromaLocation externalFormatResolveChromaOffsetX_           = ChromaLocation::eCositedEven,
                                                            ChromaLocation externalFormatResolveChromaOffsetY_           = ChromaLocation::eCositedEven,
                                                            void *         pNext_                                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , nullColorAttachmentWithExternalFormatResolve{ nullColorAttachmentWithExternalFormatResolve_ }
      , externalFormatResolveChromaOffsetX{ externalFormatResolveChromaOffsetX_ }
      , externalFormatResolveChromaOffsetY{ externalFormatResolveChromaOffsetY_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceExternalFormatResolvePropertiesANDROID( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceExternalFormatResolvePropertiesANDROID( VkPhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExternalFormatResolvePropertiesANDROID( *reinterpret_cast<PhysicalDeviceExternalFormatResolvePropertiesANDROID const *>( &rhs ) )
    {
    }

    PhysicalDeviceExternalFormatResolvePropertiesANDROID &
      operator=( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceExternalFormatResolvePropertiesANDROID & operator=( VkPhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceExternalFormatResolvePropertiesANDROID const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceExternalFormatResolvePropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExternalFormatResolvePropertiesANDROID *>( this );
    }

    operator VkPhysicalDeviceExternalFormatResolvePropertiesANDROID &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExternalFormatResolvePropertiesANDROID *>( this );
    }

    operator VkPhysicalDeviceExternalFormatResolvePropertiesANDROID const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceExternalFormatResolvePropertiesANDROID *>( this );
    }

    operator VkPhysicalDeviceExternalFormatResolvePropertiesANDROID *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceExternalFormatResolvePropertiesANDROID *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &, ChromaLocation const &, ChromaLocation const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, nullColorAttachmentWithExternalFormatResolve, externalFormatResolveChromaOffsetX, externalFormatResolveChromaOffsetY );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & ) const = default;
#  else
    bool operator==( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( nullColorAttachmentWithExternalFormatResolve == rhs.nullColorAttachmentWithExternalFormatResolve ) &&
             ( externalFormatResolveChromaOffsetX == rhs.externalFormatResolveChromaOffsetX ) &&
             ( externalFormatResolveChromaOffsetY == rhs.externalFormatResolveChromaOffsetY );
#    endif
    }

    bool operator!=( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    StructureType  sType                                        = StructureType::ePhysicalDeviceExternalFormatResolvePropertiesANDROID;
    void *         pNext                                        = {};
    Bool32         nullColorAttachmentWithExternalFormatResolve = {};
    ChromaLocation externalFormatResolveChromaOffsetX           = ChromaLocation::eCositedEven;
    ChromaLocation externalFormatResolveChromaOffsetY           = ChromaLocation::eCositedEven;
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceExternalFormatResolvePropertiesANDROID>
  {
    using Type = PhysicalDeviceExternalFormatResolvePropertiesANDROID;
  };
#  endif

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalFormatResolvePropertiesANDROID>
  {
    using Type = PhysicalDeviceExternalFormatResolvePropertiesANDROID;
  };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/

  // wrapper struct for struct VkPhysicalDeviceExternalImageFormatInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalImageFormatInfo.html
  struct PhysicalDeviceExternalImageFormatInfo
  {
    using NativeType = VkPhysicalDeviceExternalImageFormatInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceExternalImageFormatInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
                                                                const void *                     pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , handleType{ handleType_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExternalImageFormatInfo( *reinterpret_cast<PhysicalDeviceExternalImageFormatInfo const *>( &rhs ) )
    {
    }

    PhysicalDeviceExternalImageFormatInfo & operator=( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceExternalImageFormatInfo & operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceExternalImageFormatInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo & setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPhysicalDeviceExternalImageFormatInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo *>( this );
    }

    operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExternalImageFormatInfo *>( this );
    }

    operator VkPhysicalDeviceExternalImageFormatInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo *>( this );
    }

    operator VkPhysicalDeviceExternalImageFormatInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceExternalImageFormatInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, ExternalMemoryHandleTypeFlagBits const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, handleType );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceExternalImageFormatInfo const & ) const = default;
#else
    bool operator==( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType );
#  endif
    }

    bool operator!=( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                    sType      = StructureType::ePhysicalDeviceExternalImageFormatInfo;
    const void *                     pNext      = {};
    ExternalMemoryHandleTypeFlagBits handleType = ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceExternalImageFormatInfo>
  {
    using Type = PhysicalDeviceExternalImageFormatInfo;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalImageFormatInfo>
  {
    using Type = PhysicalDeviceExternalImageFormatInfo;
  };

  using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo;

  // wrapper struct for struct VkPhysicalDeviceExternalMemoryHostPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalMemoryHostPropertiesEXT.html
  struct PhysicalDeviceExternalMemoryHostPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceExternalMemoryHostPropertiesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( DeviceSize minImportedHostPointerAlignment_ = {},
                                                                        void *     pNext_                           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , minImportedHostPointerAlignment{ minImportedHostPointerAlignment_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceExternalMemoryHostPropertiesEXT( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExternalMemoryHostPropertiesEXT( *reinterpret_cast<PhysicalDeviceExternalMemoryHostPropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceExternalMemoryHostPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, minImportedHostPointerAlignment );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceExternalMemoryHostPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment );
#  endif
    }

    bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                           = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
    void *        pNext                           = {};
    DeviceSize    minImportedHostPointerAlignment = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceExternalMemoryHostPropertiesEXT>
  {
    using Type = PhysicalDeviceExternalMemoryHostPropertiesEXT;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT>
  {
    using Type = PhysicalDeviceExternalMemoryHostPropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceExternalMemoryRDMAFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalMemoryRDMAFeaturesNV.html
  struct PhysicalDeviceExternalMemoryRDMAFeaturesNV
  {
    using NativeType = VkPhysicalDeviceExternalMemoryRDMAFeaturesNV;

    // Chain metadata: structureType is the sType this wrapper carries; allowDuplicate == false
    // presumably means at most one instance is allowed per pNext chain (consumed elsewhere in vulkan.hpp).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; members default to zero/empty, pNext_ last so fields can be set positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV( Bool32 externalMemoryRDMA_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , externalMemoryRDMA{ externalMemoryRDMA_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; reinterprets rhs as the wrapper (assumes layout compatibility).
    PhysicalDeviceExternalMemoryRDMAFeaturesNV( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExternalMemoryRDMAFeaturesNV( *reinterpret_cast<PhysicalDeviceExternalMemoryRDMAFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceExternalMemoryRDMAFeaturesNV & operator=( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; same layout-compatibility assumption as the converting constructor.
    PhysicalDeviceExternalMemoryRDMAFeaturesNV & operator=( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceExternalMemoryRDMAFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters returning *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryRDMAFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryRDMAFeaturesNV & setExternalMemoryRDMA( Bool32 externalMemoryRDMA_ ) VULKAN_HPP_NOEXCEPT
    {
      externalMemoryRDMA = externalMemoryRDMA_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (reference and pointer, const and non-const),
    // relying on the wrapper being layout-compatible with NativeType.
    operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryRDMAFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExternalMemoryRDMAFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceExternalMemoryRDMAFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceExternalMemoryRDMAFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members; used by the generic comparison path below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, externalMemoryRDMA );
    }
#endif

    // Member-wise comparison: defaulted <=> when available, otherwise explicit == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryRDMA == rhs.externalMemoryRDMA );
#  endif
    }

    bool operator!=( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct field-for-field, with sType preset to the matching enum value.
    StructureType sType              = StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV;
    void *        pNext              = {};
    Bool32        externalMemoryRDMA = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and newer: map the C struct type back to its C++ wrapper type.
  template <>
  struct CppType<VkPhysicalDeviceExternalMemoryRDMAFeaturesNV>
  {
    using Type = PhysicalDeviceExternalMemoryRDMAFeaturesNV;
  };
#endif

  // Map the sType enum value back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV>
  {
    using Type = PhysicalDeviceExternalMemoryRDMAFeaturesNV;
  };

#if defined( VK_USE_PLATFORM_SCREEN_QNX )
  // wrapper struct for struct VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX.html
  struct PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX
  {
    using NativeType = VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX;

    // Chain metadata: structureType is the sType this wrapper carries; allowDuplicate == false
    // presumably means at most one instance is allowed per pNext chain (consumed elsewhere in vulkan.hpp).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; members default to zero/empty, pNext_ last so fields can be set positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( Bool32 screenBufferImport_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , screenBufferImport{ screenBufferImport_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; reinterprets rhs as the wrapper (assumes layout compatibility).
    PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( *reinterpret_cast<PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const *>( &rhs ) )
    {
    }

    PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX &
      operator=( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; same layout-compatibility assumption as the converting constructor.
    PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX & operator=( VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters returning *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX & setScreenBufferImport( Bool32 screenBufferImport_ ) VULKAN_HPP_NOEXCEPT
    {
      screenBufferImport = screenBufferImport_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (reference and pointer, const and non-const),
    // relying on the wrapper being layout-compatible with NativeType.
    operator VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX *>( this );
    }

    operator VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX *>( this );
    }

    operator VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX *>( this );
    }

    operator VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members; used by the generic comparison path below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, screenBufferImport );
    }
#  endif

    // Member-wise comparison: defaulted <=> when available, otherwise explicit == / !=.
#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & ) const = default;
#  else
    bool operator==( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( screenBufferImport == rhs.screenBufferImport );
#    endif
    }

    bool operator!=( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror the C struct field-for-field, with sType preset to the matching enum value.
    StructureType sType              = StructureType::ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX;
    void *        pNext              = {};
    Bool32        screenBufferImport = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and newer: map the C struct type back to its C++ wrapper type.
  template <>
  struct CppType<VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX>
  {
    using Type = PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX;
  };
#  endif

  // Map the sType enum value back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX>
  {
    using Type = PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX;
  };
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/

  // wrapper struct for struct VkPhysicalDeviceExternalSemaphoreInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalSemaphoreInfo.html
  struct PhysicalDeviceExternalSemaphoreInfo
  {
    using NativeType = VkPhysicalDeviceExternalSemaphoreInfo;

    // Chain metadata: structureType is the sType this wrapper carries; allowDuplicate == false
    // presumably means at most one instance is allowed per pNext chain (consumed elsewhere in vulkan.hpp).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceExternalSemaphoreInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; handleType defaults to eOpaqueFd (matching the member default below).
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
                                                              const void *                        pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , handleType{ handleType_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; reinterprets rhs as the wrapper (assumes layout compatibility).
    PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExternalSemaphoreInfo( *reinterpret_cast<PhysicalDeviceExternalSemaphoreInfo const *>( &rhs ) )
    {
    }

    PhysicalDeviceExternalSemaphoreInfo & operator=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; same layout-compatibility assumption as the converting constructor.
    PhysicalDeviceExternalSemaphoreInfo & operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceExternalSemaphoreInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters returning *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo & setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (reference and pointer, const and non-const),
    // relying on the wrapper being layout-compatible with NativeType.
    operator VkPhysicalDeviceExternalSemaphoreInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( this );
    }

    operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExternalSemaphoreInfo *>( this );
    }

    operator VkPhysicalDeviceExternalSemaphoreInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( this );
    }

    operator VkPhysicalDeviceExternalSemaphoreInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceExternalSemaphoreInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members; used by the generic comparison path below.
    std::tuple<StructureType const &, const void * const &, ExternalSemaphoreHandleTypeFlagBits const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, handleType );
    }
#endif

    // Member-wise comparison: defaulted <=> when available, otherwise explicit == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceExternalSemaphoreInfo const & ) const = default;
#else
    bool operator==( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType );
#  endif
    }

    bool operator!=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct field-for-field, with sType preset to the matching enum value.
    StructureType                       sType      = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
    const void *                        pNext      = {};
    ExternalSemaphoreHandleTypeFlagBits handleType = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and newer: map the C struct type back to its C++ wrapper type.
  template <>
  struct CppType<VkPhysicalDeviceExternalSemaphoreInfo>
  {
    using Type = PhysicalDeviceExternalSemaphoreInfo;
  };
#endif

  // Map the sType enum value back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalSemaphoreInfo>
  {
    using Type = PhysicalDeviceExternalSemaphoreInfo;
  };

  // Alias for the identical KHR-extension spelling (the struct was promoted to core).
  using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo;

  // wrapper struct for struct VkPhysicalDeviceExternalTensorInfoARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalTensorInfoARM.html
  struct PhysicalDeviceExternalTensorInfoARM
  {
    using NativeType = VkPhysicalDeviceExternalTensorInfoARM;

    // Chain metadata: structureType is the sType this wrapper carries; allowDuplicate == false
    // presumably means at most one instance is allowed per pNext chain (consumed elsewhere in vulkan.hpp).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceExternalTensorInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; handleType defaults to eOpaqueFd (matching the member default below).
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalTensorInfoARM( TensorCreateFlagsARM             flags_        = {},
                                                              const TensorDescriptionARM *     pDescription_ = {},
                                                              ExternalMemoryHandleTypeFlagBits handleType_   = ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
                                                              const void *                     pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , pDescription{ pDescription_ }
      , handleType{ handleType_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalTensorInfoARM( PhysicalDeviceExternalTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; reinterprets rhs as the wrapper (assumes layout compatibility).
    PhysicalDeviceExternalTensorInfoARM( VkPhysicalDeviceExternalTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExternalTensorInfoARM( *reinterpret_cast<PhysicalDeviceExternalTensorInfoARM const *>( &rhs ) )
    {
    }

    PhysicalDeviceExternalTensorInfoARM & operator=( PhysicalDeviceExternalTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; same layout-compatibility assumption as the converting constructor.
    PhysicalDeviceExternalTensorInfoARM & operator=( VkPhysicalDeviceExternalTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceExternalTensorInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters returning *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalTensorInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalTensorInfoARM & setFlags( TensorCreateFlagsARM flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalTensorInfoARM & setPDescription( const TensorDescriptionARM * pDescription_ ) VULKAN_HPP_NOEXCEPT
    {
      pDescription = pDescription_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalTensorInfoARM & setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (reference and pointer, const and non-const),
    // relying on the wrapper being layout-compatible with NativeType.
    operator VkPhysicalDeviceExternalTensorInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExternalTensorInfoARM *>( this );
    }

    operator VkPhysicalDeviceExternalTensorInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExternalTensorInfoARM *>( this );
    }

    operator VkPhysicalDeviceExternalTensorInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceExternalTensorInfoARM *>( this );
    }

    operator VkPhysicalDeviceExternalTensorInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceExternalTensorInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members; used by the generic comparison path below.
    std::tuple<StructureType const &,
               const void * const &,
               TensorCreateFlagsARM const &,
               const TensorDescriptionARM * const &,
               ExternalMemoryHandleTypeFlagBits const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, pDescription, handleType );
    }
#endif

    // Member-wise comparison: defaulted <=> when available, otherwise explicit == / !=.
    // Note: pDescription is compared by pointer value, not by pointee contents.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceExternalTensorInfoARM const & ) const = default;
#else
    bool operator==( PhysicalDeviceExternalTensorInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pDescription == rhs.pDescription ) &&
             ( handleType == rhs.handleType );
#  endif
    }

    bool operator!=( PhysicalDeviceExternalTensorInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct field-for-field, with sType preset to the matching enum value.
    StructureType                    sType        = StructureType::ePhysicalDeviceExternalTensorInfoARM;
    const void *                     pNext        = {};
    TensorCreateFlagsARM             flags        = {};
    const TensorDescriptionARM *     pDescription = {};
    ExternalMemoryHandleTypeFlagBits handleType   = ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and newer: map the C struct type back to its C++ wrapper type.
  template <>
  struct CppType<VkPhysicalDeviceExternalTensorInfoARM>
  {
    using Type = PhysicalDeviceExternalTensorInfoARM;
  };
#endif

  // Map the sType enum value back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalTensorInfoARM>
  {
    using Type = PhysicalDeviceExternalTensorInfoARM;
  };

  // wrapper struct for struct VkPhysicalDeviceFaultFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFaultFeaturesEXT.html
  struct PhysicalDeviceFaultFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceFaultFeaturesEXT;

    // Chain metadata: structureType is the sType this wrapper carries; allowDuplicate == false
    // presumably means at most one instance is allowed per pNext chain (consumed elsewhere in vulkan.hpp).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceFaultFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; members default to zero/empty, pNext_ last so fields can be set positionally.
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceFaultFeaturesEXT( Bool32 deviceFault_ = {}, Bool32 deviceFaultVendorBinary_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , deviceFault{ deviceFault_ }
      , deviceFaultVendorBinary{ deviceFaultVendorBinary_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFaultFeaturesEXT( PhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; reinterprets rhs as the wrapper (assumes layout compatibility).
    PhysicalDeviceFaultFeaturesEXT( VkPhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFaultFeaturesEXT( *reinterpret_cast<PhysicalDeviceFaultFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceFaultFeaturesEXT & operator=( PhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; same layout-compatibility assumption as the converting constructor.
    PhysicalDeviceFaultFeaturesEXT & operator=( VkPhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceFaultFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters returning *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT & setDeviceFault( Bool32 deviceFault_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceFault = deviceFault_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT & setDeviceFaultVendorBinary( Bool32 deviceFaultVendorBinary_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceFaultVendorBinary = deviceFaultVendorBinary_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (reference and pointer, const and non-const),
    // relying on the wrapper being layout-compatible with NativeType.
    operator VkPhysicalDeviceFaultFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFaultFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceFaultFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFaultFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceFaultFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceFaultFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceFaultFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceFaultFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members; used by the generic comparison path below.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, deviceFault, deviceFaultVendorBinary );
    }
#endif

    // Member-wise comparison: defaulted <=> when available, otherwise explicit == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFaultFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceFaultFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceFault == rhs.deviceFault ) &&
             ( deviceFaultVendorBinary == rhs.deviceFaultVendorBinary );
#  endif
    }

    bool operator!=( PhysicalDeviceFaultFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct field-for-field, with sType preset to the matching enum value.
    StructureType sType                   = StructureType::ePhysicalDeviceFaultFeaturesEXT;
    void *        pNext                   = {};
    Bool32        deviceFault             = {};
    Bool32        deviceFaultVendorBinary = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and newer: map the C struct type back to its C++ wrapper type.
  template <>
  struct CppType<VkPhysicalDeviceFaultFeaturesEXT>
  {
    using Type = PhysicalDeviceFaultFeaturesEXT;
  };
#endif

  // Map the sType enum value back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFaultFeaturesEXT>
  {
    using Type = PhysicalDeviceFaultFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceFeatures2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFeatures2.html
  struct PhysicalDeviceFeatures2
  {
    using NativeType = VkPhysicalDeviceFeatures2;

    // Chain metadata: structureType is the sType this wrapper carries; allowDuplicate == false
    // presumably means at most one instance is allowed per pNext chain (consumed elsewhere in vulkan.hpp).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceFeatures2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; features defaults to an all-zero PhysicalDeviceFeatures.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( PhysicalDeviceFeatures features_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , features{ features_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; reinterprets rhs as the wrapper (assumes layout compatibility).
    PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFeatures2( *reinterpret_cast<PhysicalDeviceFeatures2 const *>( &rhs ) )
    {
    }

    PhysicalDeviceFeatures2 & operator=( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; same layout-compatibility assumption as the converting constructor.
    PhysicalDeviceFeatures2 & operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceFeatures2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters returning *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & setFeatures( PhysicalDeviceFeatures const & features_ ) VULKAN_HPP_NOEXCEPT
    {
      features = features_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (reference and pointer, const and non-const),
    // relying on the wrapper being layout-compatible with NativeType.
    operator VkPhysicalDeviceFeatures2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFeatures2 *>( this );
    }

    operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFeatures2 *>( this );
    }

    operator VkPhysicalDeviceFeatures2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceFeatures2 *>( this );
    }

    operator VkPhysicalDeviceFeatures2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceFeatures2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members; used by the generic comparison path below.
    std::tuple<StructureType const &, void * const &, PhysicalDeviceFeatures const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, features );
    }
#endif

    // Member-wise comparison: defaulted <=> when available, otherwise explicit == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFeatures2 const & ) const = default;
#else
    bool operator==( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( features == rhs.features );
#  endif
    }

    bool operator!=( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct field-for-field, with sType preset to the matching enum value.
    StructureType          sType    = StructureType::ePhysicalDeviceFeatures2;
    void *                 pNext    = {};
    PhysicalDeviceFeatures features = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and newer: map the C struct type back to its C++ wrapper type.
  template <>
  struct CppType<VkPhysicalDeviceFeatures2>
  {
    using Type = PhysicalDeviceFeatures2;
  };
#endif

  // Map the sType enum value back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFeatures2>
  {
    using Type = PhysicalDeviceFeatures2;
  };

  // Alias for the identical KHR-extension spelling (the struct was promoted to core).
  using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2;

  // wrapper struct for struct VkPhysicalDeviceFloatControlsProperties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFloatControlsProperties.html
  struct PhysicalDeviceFloatControlsProperties
  {
    using NativeType = VkPhysicalDeviceFloatControlsProperties;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceFloatControlsProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceFloatControlsProperties( ShaderFloatControlsIndependence denormBehaviorIndependence_ = ShaderFloatControlsIndependence::e32BitOnly,
                                             ShaderFloatControlsIndependence roundingModeIndependence_   = ShaderFloatControlsIndependence::e32BitOnly,
                                             Bool32                          shaderSignedZeroInfNanPreserveFloat16_ = {},
                                             Bool32                          shaderSignedZeroInfNanPreserveFloat32_ = {},
                                             Bool32                          shaderSignedZeroInfNanPreserveFloat64_ = {},
                                             Bool32                          shaderDenormPreserveFloat16_           = {},
                                             Bool32                          shaderDenormPreserveFloat32_           = {},
                                             Bool32                          shaderDenormPreserveFloat64_           = {},
                                             Bool32                          shaderDenormFlushToZeroFloat16_        = {},
                                             Bool32                          shaderDenormFlushToZeroFloat32_        = {},
                                             Bool32                          shaderDenormFlushToZeroFloat64_        = {},
                                             Bool32                          shaderRoundingModeRTEFloat16_          = {},
                                             Bool32                          shaderRoundingModeRTEFloat32_          = {},
                                             Bool32                          shaderRoundingModeRTEFloat64_          = {},
                                             Bool32                          shaderRoundingModeRTZFloat16_          = {},
                                             Bool32                          shaderRoundingModeRTZFloat32_          = {},
                                             Bool32                          shaderRoundingModeRTZFloat64_          = {},
                                             void *                          pNext_                                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , denormBehaviorIndependence{ denormBehaviorIndependence_ }
      , roundingModeIndependence{ roundingModeIndependence_ }
      , shaderSignedZeroInfNanPreserveFloat16{ shaderSignedZeroInfNanPreserveFloat16_ }
      , shaderSignedZeroInfNanPreserveFloat32{ shaderSignedZeroInfNanPreserveFloat32_ }
      , shaderSignedZeroInfNanPreserveFloat64{ shaderSignedZeroInfNanPreserveFloat64_ }
      , shaderDenormPreserveFloat16{ shaderDenormPreserveFloat16_ }
      , shaderDenormPreserveFloat32{ shaderDenormPreserveFloat32_ }
      , shaderDenormPreserveFloat64{ shaderDenormPreserveFloat64_ }
      , shaderDenormFlushToZeroFloat16{ shaderDenormFlushToZeroFloat16_ }
      , shaderDenormFlushToZeroFloat32{ shaderDenormFlushToZeroFloat32_ }
      , shaderDenormFlushToZeroFloat64{ shaderDenormFlushToZeroFloat64_ }
      , shaderRoundingModeRTEFloat16{ shaderRoundingModeRTEFloat16_ }
      , shaderRoundingModeRTEFloat32{ shaderRoundingModeRTEFloat32_ }
      , shaderRoundingModeRTEFloat64{ shaderRoundingModeRTEFloat64_ }
      , shaderRoundingModeRTZFloat16{ shaderRoundingModeRTZFloat16_ }
      , shaderRoundingModeRTZFloat32{ shaderRoundingModeRTZFloat32_ }
      , shaderRoundingModeRTZFloat64{ shaderRoundingModeRTZFloat64_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFloatControlsProperties( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFloatControlsProperties( *reinterpret_cast<PhysicalDeviceFloatControlsProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceFloatControlsProperties & operator=( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceFloatControlsProperties & operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceFloatControlsProperties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceFloatControlsProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFloatControlsProperties *>( this );
    }

    operator VkPhysicalDeviceFloatControlsProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFloatControlsProperties *>( this );
    }

    operator VkPhysicalDeviceFloatControlsProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceFloatControlsProperties *>( this );
    }

    operator VkPhysicalDeviceFloatControlsProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceFloatControlsProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &,
               void * const &,
               ShaderFloatControlsIndependence const &,
               ShaderFloatControlsIndependence const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       denormBehaviorIndependence,
                       roundingModeIndependence,
                       shaderSignedZeroInfNanPreserveFloat16,
                       shaderSignedZeroInfNanPreserveFloat32,
                       shaderSignedZeroInfNanPreserveFloat64,
                       shaderDenormPreserveFloat16,
                       shaderDenormPreserveFloat32,
                       shaderDenormPreserveFloat64,
                       shaderDenormFlushToZeroFloat16,
                       shaderDenormFlushToZeroFloat32,
                       shaderDenormFlushToZeroFloat64,
                       shaderRoundingModeRTEFloat16,
                       shaderRoundingModeRTEFloat32,
                       shaderRoundingModeRTEFloat64,
                       shaderRoundingModeRTZFloat16,
                       shaderRoundingModeRTZFloat32,
                       shaderRoundingModeRTZFloat64 );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFloatControlsProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) &&
             ( roundingModeIndependence == rhs.roundingModeIndependence ) &&
             ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) &&
             ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) &&
             ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) &&
             ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) &&
             ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) &&
             ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) &&
             ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) &&
             ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) &&
             ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) &&
             ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 );
#  endif
    }

    bool operator!=( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                   sType                                 = StructureType::ePhysicalDeviceFloatControlsProperties;
    void *                          pNext                                 = {};
    ShaderFloatControlsIndependence denormBehaviorIndependence            = ShaderFloatControlsIndependence::e32BitOnly;
    ShaderFloatControlsIndependence roundingModeIndependence              = ShaderFloatControlsIndependence::e32BitOnly;
    Bool32                          shaderSignedZeroInfNanPreserveFloat16 = {};
    Bool32                          shaderSignedZeroInfNanPreserveFloat32 = {};
    Bool32                          shaderSignedZeroInfNanPreserveFloat64 = {};
    Bool32                          shaderDenormPreserveFloat16           = {};
    Bool32                          shaderDenormPreserveFloat32           = {};
    Bool32                          shaderDenormPreserveFloat64           = {};
    Bool32                          shaderDenormFlushToZeroFloat16        = {};
    Bool32                          shaderDenormFlushToZeroFloat32        = {};
    Bool32                          shaderDenormFlushToZeroFloat64        = {};
    Bool32                          shaderRoundingModeRTEFloat16          = {};
    Bool32                          shaderRoundingModeRTEFloat32          = {};
    Bool32                          shaderRoundingModeRTEFloat64          = {};
    Bool32                          shaderRoundingModeRTZFloat16          = {};
    Bool32                          shaderRoundingModeRTZFloat32          = {};
    Bool32                          shaderRoundingModeRTZFloat64          = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only available with C++20 or newer).
  template <>
  struct CppType<VkPhysicalDeviceFloatControlsProperties>
  {
    using Type = PhysicalDeviceFloatControlsProperties;
  };
#endif

  // Maps the StructureType enumerant to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFloatControlsProperties>
  {
    using Type = PhysicalDeviceFloatControlsProperties;
  };

  // Alias for the KHR-suffixed name — presumably retained for source compatibility after the
  // struct lost its extension suffix; both names refer to the same type.
  using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties;

  // wrapper struct for struct VkPhysicalDeviceFormatPackFeaturesARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFormatPackFeaturesARM.html
  // NOTE: the wrapper must stay layout-identical to the native C struct — the reinterpret_cast
  // conversions below depend on it. Do not add, remove, or reorder data members.
  struct PhysicalDeviceFormatPackFeaturesARM
  {
    using NativeType = VkPhysicalDeviceFormatPackFeaturesARM;

    // Chain metadata: at most one instance is allowed in a structure chain (allowDuplicate ==
    // false), and sType is fixed to structureType.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceFormatPackFeaturesARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is set by its member initializer and cannot be overridden.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFormatPackFeaturesARM( Bool32 formatPack_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , formatPack{ formatPack_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFormatPackFeaturesARM( PhysicalDeviceFormatPackFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the identical layout noted above.
    PhysicalDeviceFormatPackFeaturesARM( VkPhysicalDeviceFormatPackFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFormatPackFeaturesARM( *reinterpret_cast<PhysicalDeviceFormatPackFeaturesARM const *>( &rhs ) )
    {
    }

    PhysicalDeviceFormatPackFeaturesARM & operator=( PhysicalDeviceFormatPackFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct; available even when the wrapper constructors above
    // are disabled via VULKAN_HPP_NO_CONSTRUCTORS.
    PhysicalDeviceFormatPackFeaturesARM & operator=( VkPhysicalDeviceFormatPackFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceFormatPackFeaturesARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFormatPackFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFormatPackFeaturesARM & setFormatPack( Bool32 formatPack_ ) VULKAN_HPP_NOEXCEPT
    {
      formatPack = formatPack_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by const / non-const reference and pointer.
    operator VkPhysicalDeviceFormatPackFeaturesARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFormatPackFeaturesARM *>( this );
    }

    operator VkPhysicalDeviceFormatPackFeaturesARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFormatPackFeaturesARM *>( this );
    }

    operator VkPhysicalDeviceFormatPackFeaturesARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceFormatPackFeaturesARM *>( this );
    }

    operator VkPhysicalDeviceFormatPackFeaturesARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceFormatPackFeaturesARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: all members as a tuple of const references, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, formatPack );
    }
#endif

    // Member-wise comparison: defaulted <=> when available, explicit == / != otherwise.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFormatPackFeaturesARM const & ) const = default;
#else
    bool operator==( PhysicalDeviceFormatPackFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatPack == rhs.formatPack );
#  endif
    }

    bool operator!=( PhysicalDeviceFormatPackFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native struct one-to-one — order is part of the ABI contract.
    StructureType sType      = StructureType::ePhysicalDeviceFormatPackFeaturesARM;
    void *        pNext      = {};
    Bool32        formatPack = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only available with C++20 or newer).
  template <>
  struct CppType<VkPhysicalDeviceFormatPackFeaturesARM>
  {
    using Type = PhysicalDeviceFormatPackFeaturesARM;
  };
#endif

  // Maps the StructureType enumerant to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFormatPackFeaturesARM>
  {
    using Type = PhysicalDeviceFormatPackFeaturesARM;
  };

  // wrapper struct for struct VkPhysicalDeviceFragmentDensityMap2FeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMap2FeaturesEXT.html
  // NOTE: the wrapper must stay layout-identical to the native C struct — the reinterpret_cast
  // conversions below depend on it. Do not add, remove, or reorder data members.
  struct PhysicalDeviceFragmentDensityMap2FeaturesEXT
  {
    using NativeType = VkPhysicalDeviceFragmentDensityMap2FeaturesEXT;

    // Chain metadata: at most one instance is allowed in a structure chain (allowDuplicate ==
    // false), and sType is fixed to structureType.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is set by its member initializer and cannot be overridden.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT( Bool32 fragmentDensityMapDeferred_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , fragmentDensityMapDeferred{ fragmentDensityMapDeferred_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the identical layout noted above.
    PhysicalDeviceFragmentDensityMap2FeaturesEXT( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentDensityMap2FeaturesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMap2FeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct; available even when the wrapper constructors above
    // are disabled via VULKAN_HPP_NO_CONSTRUCTORS.
    PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceFragmentDensityMap2FeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT &
      setFragmentDensityMapDeferred( Bool32 fragmentDensityMapDeferred_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentDensityMapDeferred = fragmentDensityMapDeferred_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by const / non-const reference and pointer.
    operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMap2FeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceFragmentDensityMap2FeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: all members as a tuple of const references, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, fragmentDensityMapDeferred );
    }
#endif

    // Member-wise comparison: defaulted <=> when available, explicit == / != otherwise.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMapDeferred == rhs.fragmentDensityMapDeferred );
#  endif
    }

    bool operator!=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native struct one-to-one — order is part of the ABI contract.
    StructureType sType                      = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT;
    void *        pNext                      = {};
    Bool32        fragmentDensityMapDeferred = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only available with C++20 or newer).
  template <>
  struct CppType<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT>
  {
    using Type = PhysicalDeviceFragmentDensityMap2FeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT>
  {
    using Type = PhysicalDeviceFragmentDensityMap2FeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceFragmentDensityMap2PropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMap2PropertiesEXT.html
  // NOTE: the wrapper must stay layout-identical to the native C struct — the reinterpret_cast
  // conversions below depend on it. Do not add, remove, or reorder data members.
  struct PhysicalDeviceFragmentDensityMap2PropertiesEXT
  {
    using NativeType = VkPhysicalDeviceFragmentDensityMap2PropertiesEXT;

    // Chain metadata: at most one instance is allowed in a structure chain (allowDuplicate ==
    // false), and sType is fixed to structureType.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is set by its member initializer and cannot be overridden.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT( Bool32   subsampledLoads_                           = {},
                                                                         Bool32   subsampledCoarseReconstructionEarlyAccess_ = {},
                                                                         uint32_t maxSubsampledArrayLayers_                  = {},
                                                                         uint32_t maxDescriptorSetSubsampledSamplers_        = {},
                                                                         void *   pNext_                                     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , subsampledLoads{ subsampledLoads_ }
      , subsampledCoarseReconstructionEarlyAccess{ subsampledCoarseReconstructionEarlyAccess_ }
      , maxSubsampledArrayLayers{ maxSubsampledArrayLayers_ }
      , maxDescriptorSetSubsampledSamplers{ maxDescriptorSetSubsampledSamplers_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceFragmentDensityMap2PropertiesEXT( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the identical layout noted above.
    PhysicalDeviceFragmentDensityMap2PropertiesEXT( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentDensityMap2PropertiesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMap2PropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct; available even when the wrapper constructors above
    // are disabled via VULKAN_HPP_NO_CONSTRUCTORS.
    // No setters are generated here — presumably because this is an output-only properties
    // structure filled in by the implementation; verify against the generator if needed.
    PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceFragmentDensityMap2PropertiesEXT const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type, by const / non-const reference and pointer.
    operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMap2PropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMap2PropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceFragmentDensityMap2PropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceFragmentDensityMap2PropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: all members as a tuple of const references, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, subsampledLoads, subsampledCoarseReconstructionEarlyAccess, maxSubsampledArrayLayers, maxDescriptorSetSubsampledSamplers );
    }
#endif

    // Member-wise comparison: defaulted <=> when available, explicit == / != otherwise.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subsampledLoads == rhs.subsampledLoads ) &&
             ( subsampledCoarseReconstructionEarlyAccess == rhs.subsampledCoarseReconstructionEarlyAccess ) &&
             ( maxSubsampledArrayLayers == rhs.maxSubsampledArrayLayers ) && ( maxDescriptorSetSubsampledSamplers == rhs.maxDescriptorSetSubsampledSamplers );
#  endif
    }

    bool operator!=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native struct one-to-one — order is part of the ABI contract.
    StructureType sType                                     = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT;
    void *        pNext                                     = {};
    Bool32        subsampledLoads                           = {};
    Bool32        subsampledCoarseReconstructionEarlyAccess = {};
    uint32_t      maxSubsampledArrayLayers                  = {};
    uint32_t      maxDescriptorSetSubsampledSamplers        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only available with C++20 or newer).
  template <>
  struct CppType<VkPhysicalDeviceFragmentDensityMap2PropertiesEXT>
  {
    using Type = PhysicalDeviceFragmentDensityMap2PropertiesEXT;
  };
#endif

  // Maps the StructureType enumerant to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT>
  {
    using Type = PhysicalDeviceFragmentDensityMap2PropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceFragmentDensityMapFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMapFeaturesEXT.html
  // NOTE: the wrapper must stay layout-identical to the native C struct — the reinterpret_cast
  // conversions below depend on it. Do not add, remove, or reorder data members.
  struct PhysicalDeviceFragmentDensityMapFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceFragmentDensityMapFeaturesEXT;

    // Chain metadata: at most one instance is allowed in a structure chain (allowDuplicate ==
    // false), and sType is fixed to structureType.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is set by its member initializer and cannot be overridden.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( Bool32 fragmentDensityMap_                    = {},
                                                                      Bool32 fragmentDensityMapDynamic_             = {},
                                                                      Bool32 fragmentDensityMapNonSubsampledImages_ = {},
                                                                      void * pNext_                                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , fragmentDensityMap{ fragmentDensityMap_ }
      , fragmentDensityMapDynamic{ fragmentDensityMapDynamic_ }
      , fragmentDensityMapNonSubsampledImages{ fragmentDensityMapNonSubsampledImages_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the identical layout noted above.
    PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentDensityMapFeaturesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMapFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct; available even when the wrapper constructors above
    // are disabled via VULKAN_HPP_NO_CONSTRUCTORS.
    PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceFragmentDensityMapFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMap( Bool32 fragmentDensityMap_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentDensityMap = fragmentDensityMap_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMapDynamic( Bool32 fragmentDensityMapDynamic_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentDensityMapDynamic = fragmentDensityMapDynamic_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT &
      setFragmentDensityMapNonSubsampledImages( Bool32 fragmentDensityMapNonSubsampledImages_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentDensityMapNonSubsampledImages = fragmentDensityMapNonSubsampledImages_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by const / non-const reference and pointer.
    operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceFragmentDensityMapFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: all members as a tuple of const references, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, fragmentDensityMap, fragmentDensityMapDynamic, fragmentDensityMapNonSubsampledImages );
    }
#endif

    // Member-wise comparison: defaulted <=> when available, explicit == / != otherwise.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFragmentDensityMapFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMap == rhs.fragmentDensityMap ) &&
             ( fragmentDensityMapDynamic == rhs.fragmentDensityMapDynamic ) &&
             ( fragmentDensityMapNonSubsampledImages == rhs.fragmentDensityMapNonSubsampledImages );
#  endif
    }

    bool operator!=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native struct one-to-one — order is part of the ABI contract.
    StructureType sType                                 = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;
    void *        pNext                                 = {};
    Bool32        fragmentDensityMap                    = {};
    Bool32        fragmentDensityMapDynamic             = {};
    Bool32        fragmentDensityMapNonSubsampledImages = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only available with C++20 or newer).
  template <>
  struct CppType<VkPhysicalDeviceFragmentDensityMapFeaturesEXT>
  {
    using Type = PhysicalDeviceFragmentDensityMapFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT>
  {
    using Type = PhysicalDeviceFragmentDensityMapFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE.html
  // NOTE: the wrapper must stay layout-identical to the native C struct — the reinterpret_cast
  // conversions below depend on it. Do not add, remove, or reorder data members.
  struct PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE
  {
    using NativeType = VkPhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE;

    // Chain metadata: at most one instance is allowed in a structure chain (allowDuplicate ==
    // false), and sType is fixed to structureType.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is set by its member initializer and cannot be overridden.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE( Bool32 fragmentDensityMapLayered_ = {},
                                                                               void * pNext_                     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , fragmentDensityMapLayered{ fragmentDensityMapLayered_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE( PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the identical layout noted above.
    PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE( VkPhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE( *reinterpret_cast<PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE const *>( &rhs ) )
    {
    }

    PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE &
      operator=( PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct; available even when the wrapper constructors above
    // are disabled via VULKAN_HPP_NO_CONSTRUCTORS.
    PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE & operator=( VkPhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE &
      setFragmentDensityMapLayered( Bool32 fragmentDensityMapLayered_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentDensityMapLayered = fragmentDensityMapLayered_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by const / non-const reference and pointer.
    operator VkPhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: all members as a tuple of const references, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, fragmentDensityMapLayered );
    }
#endif

    // Member-wise comparison: defaulted <=> when available, explicit == / != otherwise.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMapLayered == rhs.fragmentDensityMapLayered );
#  endif
    }

    bool operator!=( PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native struct one-to-one — order is part of the ABI contract.
    StructureType sType                     = StructureType::ePhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE;
    void *        pNext                     = {};
    Bool32        fragmentDensityMapLayered = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only available with C++20 or newer).
  template <>
  struct CppType<VkPhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE>
  {
    using Type = PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE;
  };
#endif

  // Maps the StructureType enumerant to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE>
  {
    using Type = PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE;
  };

  // wrapper struct for struct VkPhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE.html
  struct PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE
  {
    // the equivalent C API struct; the wrapper is assumed layout-identical to it,
    // which the reinterpret_casts below rely on
    using NativeType = VkPhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE;

    // allowDuplicate == false marks that this struct is not expected to appear more
    // than once in a pNext chain; structureType is the sType this wrapper carries
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed by the member initializer and is not a parameter
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE( uint32_t maxFragmentDensityMapLayers_ = {},
                                                                                 void *   pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxFragmentDensityMapLayers{ maxFragmentDensityMapLayers_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE( PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // construct from the C API struct; a plain reinterpretation of the same bytes
    PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE( VkPhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE( *reinterpret_cast<PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE const *>( &rhs ) )
    {
    }

    PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE &
      operator=( PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C API struct (same reinterpretation as the converting constructor)
    PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE &
      operator=( VkPhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE const *>( &rhs );
      return *this;
    }

    // zero-cost conversions (reference and pointer, const and non-const) to the C API
    // struct, so this wrapper can be handed directly to C Vulkan entry points
    operator VkPhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // exposes references to all members as a tuple; used below for tuple-based comparison
    std::tuple<StructureType const &, void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxFragmentDensityMapLayers );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // member-wise comparison: defaulted spaceship when available, else explicit == / !=
    auto operator<=>( PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxFragmentDensityMapLayers == rhs.maxFragmentDensityMapLayers );
#  endif
    }

    bool operator!=( PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                       = StructureType::ePhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE;
    void *        pNext                       = {};
    uint32_t      maxFragmentDensityMapLayers = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C struct type to its C++ wrapper type (C++20 only)
  template <>
  struct CppType<VkPhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE>
  {
    using Type = PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE;
  };
#endif

  // maps the StructureType enum value to the wrapper struct carrying that sType
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE>
  {
    using Type = PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE;
  };

  // wrapper struct for struct VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT.html
  struct PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT
  {
    // the equivalent C API struct; the wrapper is assumed layout-identical to it,
    // which the reinterpret_casts below rely on
    using NativeType = VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT;

    // allowDuplicate == false marks that this struct is not expected to appear more
    // than once in a pNext chain; structureType is the sType this wrapper carries
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed by the member initializer and is not a parameter
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT( Bool32 fragmentDensityMapOffset_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , fragmentDensityMapOffset{ fragmentDensityMapOffset_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT( PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C API struct; a plain reinterpretation of the same bytes
    PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT &
      operator=( PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C API struct (same reinterpretation as the converting constructor)
    PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters (builder style); each returns *this
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT &
      setFragmentDensityMapOffset( Bool32 fragmentDensityMapOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentDensityMapOffset = fragmentDensityMapOffset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // zero-cost conversions (reference and pointer, const and non-const) to the C API
    // struct, so this wrapper can be handed directly to C Vulkan entry points
    operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // exposes references to all members as a tuple; used below for tuple-based comparison
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, fragmentDensityMapOffset );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // member-wise comparison: defaulted spaceship when available, else explicit == / !=
    auto operator<=>( PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMapOffset == rhs.fragmentDensityMapOffset );
#  endif
    }

    bool operator!=( PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                    = StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesEXT;
    void *        pNext                    = {};
    Bool32        fragmentDensityMapOffset = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C struct type to its C++ wrapper type (C++20 only)
  template <>
  struct CppType<VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT>
  {
    using Type = PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT;
  };
#endif

  // maps the StructureType enum value to the wrapper struct carrying that sType
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesEXT>
  {
    using Type = PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT;
  };

  // legacy name: the QCOM variant of this struct was promoted/aliased to the EXT one
  using PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM = PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT;

  // wrapper struct for struct VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT.html
  struct PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT
  {
    // the equivalent C API struct; the wrapper is assumed layout-identical to it,
    // which the reinterpret_casts below rely on
    using NativeType = VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT;

    // allowDuplicate == false marks that this struct is not expected to appear more
    // than once in a pNext chain; structureType is the sType this wrapper carries
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed by the member initializer and is not a parameter
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT( Extent2D fragmentDensityOffsetGranularity_ = {},
                                                                              void *   pNext_                            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , fragmentDensityOffsetGranularity{ fragmentDensityOffsetGranularity_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT( PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C API struct; a plain reinterpretation of the same bytes
    PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT &
      operator=( PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C API struct (same reinterpretation as the converting constructor)
    PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const *>( &rhs );
      return *this;
    }

    // zero-cost conversions (reference and pointer, const and non-const) to the C API
    // struct, so this wrapper can be handed directly to C Vulkan entry points
    operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // exposes references to all members as a tuple; used below for tuple-based comparison
    std::tuple<StructureType const &, void * const &, Extent2D const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, fragmentDensityOffsetGranularity );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // member-wise comparison: defaulted spaceship when available, else explicit == / !=
    auto operator<=>( PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityOffsetGranularity == rhs.fragmentDensityOffsetGranularity );
#  endif
    }

    bool operator!=( PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                            = StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesEXT;
    void *        pNext                            = {};
    Extent2D      fragmentDensityOffsetGranularity = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C struct type to its C++ wrapper type (C++20 only)
  template <>
  struct CppType<VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT>
  {
    using Type = PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT;
  };
#endif

  // maps the StructureType enum value to the wrapper struct carrying that sType
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesEXT>
  {
    using Type = PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT;
  };

  // legacy name: the QCOM variant of this struct was promoted/aliased to the EXT one
  using PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM = PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT;

  // wrapper struct for struct VkPhysicalDeviceFragmentDensityMapPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMapPropertiesEXT.html
  struct PhysicalDeviceFragmentDensityMapPropertiesEXT
  {
    // the equivalent C API struct; the wrapper is assumed layout-identical to it,
    // which the reinterpret_casts below rely on
    using NativeType = VkPhysicalDeviceFragmentDensityMapPropertiesEXT;

    // allowDuplicate == false marks that this struct is not expected to appear more
    // than once in a pNext chain; structureType is the sType this wrapper carries
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed by the member initializer and is not a parameter
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( Extent2D minFragmentDensityTexelSize_ = {},
                                                                        Extent2D maxFragmentDensityTexelSize_ = {},
                                                                        Bool32   fragmentDensityInvocations_  = {},
                                                                        void *   pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , minFragmentDensityTexelSize{ minFragmentDensityTexelSize_ }
      , maxFragmentDensityTexelSize{ maxFragmentDensityTexelSize_ }
      , fragmentDensityInvocations{ fragmentDensityInvocations_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceFragmentDensityMapPropertiesEXT( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C API struct; a plain reinterpretation of the same bytes
    PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentDensityMapPropertiesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMapPropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C API struct (same reinterpretation as the converting constructor)
    PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceFragmentDensityMapPropertiesEXT const *>( &rhs );
      return *this;
    }

    // zero-cost conversions (reference and pointer, const and non-const) to the C API
    // struct, so this wrapper can be handed directly to C Vulkan entry points
    operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceFragmentDensityMapPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // exposes references to all members as a tuple; used below for tuple-based comparison
    std::tuple<StructureType const &, void * const &, Extent2D const &, Extent2D const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, minFragmentDensityTexelSize, maxFragmentDensityTexelSize, fragmentDensityInvocations );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // member-wise comparison: defaulted spaceship when available, else explicit == / !=
    auto operator<=>( PhysicalDeviceFragmentDensityMapPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minFragmentDensityTexelSize == rhs.minFragmentDensityTexelSize ) &&
             ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize ) && ( fragmentDensityInvocations == rhs.fragmentDensityInvocations );
#  endif
    }

    bool operator!=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                       = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT;
    void *        pNext                       = {};
    Extent2D      minFragmentDensityTexelSize = {};
    Extent2D      maxFragmentDensityTexelSize = {};
    Bool32        fragmentDensityInvocations  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C struct type to its C++ wrapper type (C++20 only)
  template <>
  struct CppType<VkPhysicalDeviceFragmentDensityMapPropertiesEXT>
  {
    using Type = PhysicalDeviceFragmentDensityMapPropertiesEXT;
  };
#endif

  // maps the StructureType enum value to the wrapper struct carrying that sType
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT>
  {
    using Type = PhysicalDeviceFragmentDensityMapPropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR.html
  struct PhysicalDeviceFragmentShaderBarycentricFeaturesKHR
  {
    // the equivalent C API struct; the wrapper is assumed layout-identical to it,
    // which the reinterpret_casts below rely on
    using NativeType = VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR;

    // allowDuplicate == false marks that this struct is not expected to appear more
    // than once in a pNext chain; structureType is the sType this wrapper carries
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed by the member initializer and is not a parameter
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( Bool32 fragmentShaderBarycentric_ = {},
                                                                             void * pNext_                     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , fragmentShaderBarycentric{ fragmentShaderBarycentric_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C API struct; a plain reinterpretation of the same bytes
    PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( *reinterpret_cast<PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceFragmentShaderBarycentricFeaturesKHR &
      operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C API struct (same reinterpretation as the converting constructor)
    PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters (builder style); each returns *this
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesKHR &
      setFragmentShaderBarycentric( Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentShaderBarycentric = fragmentShaderBarycentric_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // zero-cost conversions (reference and pointer, const and non-const) to the C API
    // struct, so this wrapper can be handed directly to C Vulkan entry points
    operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // exposes references to all members as a tuple; used below for tuple-based comparison
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, fragmentShaderBarycentric );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // member-wise comparison: defaulted spaceship when available, else explicit == / !=
    auto operator<=>( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric );
#  endif
    }

    bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                     = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
    void *        pNext                     = {};
    Bool32        fragmentShaderBarycentric = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C struct type to its C++ wrapper type (C++20 only)
  template <>
  struct CppType<VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR>
  {
    using Type = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
  };
#endif

  // maps the StructureType enum value to the wrapper struct carrying that sType
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR>
  {
    using Type = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
  };

  // legacy name: the NV variant of this struct was promoted/aliased to the KHR one
  using PhysicalDeviceFragmentShaderBarycentricFeaturesNV = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR;

  // wrapper struct for struct VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR.html
  struct PhysicalDeviceFragmentShaderBarycentricPropertiesKHR
  {
    // the equivalent C API struct; the wrapper is assumed layout-identical to it,
    // which the reinterpret_casts below rely on
    using NativeType = VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR;

    // allowDuplicate == false marks that this struct is not expected to appear more
    // than once in a pNext chain; structureType is the sType this wrapper carries
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed by the member initializer and is not a parameter
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( Bool32 triStripVertexOrderIndependentOfProvokingVertex_ = {},
                                                                               void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , triStripVertexOrderIndependentOfProvokingVertex{ triStripVertexOrderIndependentOfProvokingVertex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C API struct; a plain reinterpretation of the same bytes
    PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( *reinterpret_cast<PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceFragmentShaderBarycentricPropertiesKHR &
      operator=( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C API struct (same reinterpretation as the converting constructor)
    PhysicalDeviceFragmentShaderBarycentricPropertiesKHR & operator=( VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const *>( &rhs );
      return *this;
    }

    // zero-cost conversions (reference and pointer, const and non-const) to the C API
    // struct, so this wrapper can be handed directly to C Vulkan entry points
    operator VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // exposes references to all members as a tuple; used below for tuple-based comparison
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, triStripVertexOrderIndependentOfProvokingVertex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // member-wise comparison: defaulted spaceship when available, else explicit == / !=
    auto operator<=>( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( triStripVertexOrderIndependentOfProvokingVertex == rhs.triStripVertexOrderIndependentOfProvokingVertex );
#  endif
    }

    bool operator!=( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                                           = StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR;
    void *        pNext                                           = {};
    Bool32        triStripVertexOrderIndependentOfProvokingVertex = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C struct type to its C++ wrapper type (C++20 only)
  template <>
  struct CppType<VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR>
  {
    using Type = PhysicalDeviceFragmentShaderBarycentricPropertiesKHR;
  };
#endif

  // maps the StructureType enum value to the wrapper struct carrying that sType
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR>
  {
    using Type = PhysicalDeviceFragmentShaderBarycentricPropertiesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT.html
  struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( Bool32 fragmentShaderSampleInterlock_      = {},
                                                                           Bool32 fragmentShaderPixelInterlock_       = {},
                                                                           Bool32 fragmentShaderShadingRateInterlock_ = {},
                                                                           void * pNext_                              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , fragmentShaderSampleInterlock{ fragmentShaderSampleInterlock_ }
      , fragmentShaderPixelInterlock{ fragmentShaderPixelInterlock_ }
      , fragmentShaderShadingRateInterlock{ fragmentShaderShadingRateInterlock_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceFragmentShaderInterlockFeaturesEXT( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentShaderInterlockFeaturesEXT( *reinterpret_cast<PhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT &
      setFragmentShaderSampleInterlock( Bool32 fragmentShaderSampleInterlock_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT &
      setFragmentShaderPixelInterlock( Bool32 fragmentShaderPixelInterlock_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT &
      setFragmentShaderShadingRateInterlock( Bool32 fragmentShaderShadingRateInterlock_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, fragmentShaderSampleInterlock, fragmentShaderPixelInterlock, fragmentShaderShadingRateInterlock );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock ) &&
             ( fragmentShaderPixelInterlock == rhs.fragmentShaderPixelInterlock ) &&
             ( fragmentShaderShadingRateInterlock == rhs.fragmentShaderShadingRateInterlock );
#  endif
    }

    bool operator!=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                              = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;
    void *        pNext                              = {};
    Bool32        fragmentShaderSampleInterlock      = {};
    Bool32        fragmentShaderPixelInterlock       = {};
    Bool32        fragmentShaderShadingRateInterlock = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper type (only provided for C++20 and newer).
  template <>
  struct CppType<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>
  {
    using Type = PhysicalDeviceFragmentShaderInterlockFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant to the C++ struct carrying that sType value.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT>
  {
    using Type = PhysicalDeviceFragmentShaderInterlockFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV.html
  struct PhysicalDeviceFragmentShadingRateEnumsFeaturesNV
  {
    using NativeType = VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV;

    // Compile-time metadata: the sType value identifying this struct, and whether
    // more than one instance may appear in a pNext chain (allowDuplicate).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Elementwise constructor; sType is fixed by its member initializer below.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( Bool32 fragmentShadingRateEnums_         = {},
                                                                           Bool32 supersampleFragmentShadingRates_  = {},
                                                                           Bool32 noInvocationFragmentShadingRates_ = {},
                                                                           void * pNext_                            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , fragmentShadingRateEnums{ fragmentShadingRateEnums_ }
      , supersampleFragmentShadingRates{ supersampleFragmentShadingRates_ }
      , noInvocationFragmentShadingRates{ noInvocationFragmentShadingRates_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the equivalent C struct; relies on this wrapper being
    // layout-compatible with VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV.
    PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( *reinterpret_cast<PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & operator=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the equivalent C struct (see layout note on the converting constructor).
    PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & operator=( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV &
      setFragmentShadingRateEnums( Bool32 fragmentShadingRateEnums_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentShadingRateEnums = fragmentShadingRateEnums_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV &
      setSupersampleFragmentShadingRates( Bool32 supersampleFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT
    {
      supersampleFragmentShadingRates = supersampleFragmentShadingRates_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV &
      setNoInvocationFragmentShadingRates( Bool32 noInvocationFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT
    {
      noInvocationFragmentShadingRates = noInvocationFragmentShadingRates_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, so this wrapper can be passed
    // directly to the C API (by reference or by pointer).
    operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; used by the comparison operators below when reflection is enabled.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, fragmentShadingRateEnums, supersampleFragmentShadingRates, noInvocationFragmentShadingRates );
    }
#endif

    // Memberwise comparison: defaulted <=> where available, otherwise hand-written == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentShadingRateEnums == rhs.fragmentShadingRateEnums ) &&
             ( supersampleFragmentShadingRates == rhs.supersampleFragmentShadingRates ) &&
             ( noInvocationFragmentShadingRates == rhs.noInvocationFragmentShadingRates );
#  endif
    }

    bool operator!=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                            = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
    void *        pNext                            = {};
    Bool32        fragmentShadingRateEnums         = {};
    Bool32        supersampleFragmentShadingRates  = {};
    Bool32        noInvocationFragmentShadingRates = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper type (only provided for C++20 and newer).
  template <>
  struct CppType<VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV>
  {
    using Type = PhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
  };
#endif

  // Maps the StructureType enumerant to the C++ struct carrying that sType value.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV>
  {
    using Type = PhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV.html
  struct PhysicalDeviceFragmentShadingRateEnumsPropertiesNV
  {
    using NativeType = VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV;

    // Compile-time metadata: the sType value identifying this struct, and whether
    // more than one instance may appear in a pNext chain (allowDuplicate).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Elementwise constructor; sType is fixed by its member initializer below.
    // Note: no setters are generated for this struct.
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( SampleCountFlagBits maxFragmentShadingRateInvocationCount_ = SampleCountFlagBits::e1,
                                                          void *              pNext_                                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxFragmentShadingRateInvocationCount{ maxFragmentShadingRateInvocationCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the equivalent C struct; relies on this wrapper being
    // layout-compatible with VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV.
    PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( *reinterpret_cast<PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceFragmentShadingRateEnumsPropertiesNV &
      operator=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the equivalent C struct (see layout note on the converting constructor).
    PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & operator=( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type, so this wrapper can be passed
    // directly to the C API (by reference or by pointer).
    operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; used by the comparison operators below when reflection is enabled.
    std::tuple<StructureType const &, void * const &, SampleCountFlagBits const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxFragmentShadingRateInvocationCount );
    }
#endif

    // Memberwise comparison: defaulted <=> where available, otherwise hand-written == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxFragmentShadingRateInvocationCount == rhs.maxFragmentShadingRateInvocationCount );
#  endif
    }

    bool operator!=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType       sType                                 = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
    void *              pNext                                 = {};
    SampleCountFlagBits maxFragmentShadingRateInvocationCount = SampleCountFlagBits::e1;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper type (only provided for C++20 and newer).
  template <>
  struct CppType<VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV>
  {
    using Type = PhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
  };
#endif

  // Maps the StructureType enumerant to the C++ struct carrying that sType value.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV>
  {
    using Type = PhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceFragmentShadingRateFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShadingRateFeaturesKHR.html
  struct PhysicalDeviceFragmentShadingRateFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceFragmentShadingRateFeaturesKHR;

    // Compile-time metadata: the sType value identifying this struct, and whether
    // more than one instance may appear in a pNext chain (allowDuplicate).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Elementwise constructor; sType is fixed by its member initializer below.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR( Bool32 pipelineFragmentShadingRate_   = {},
                                                                       Bool32 primitiveFragmentShadingRate_  = {},
                                                                       Bool32 attachmentFragmentShadingRate_ = {},
                                                                       void * pNext_                         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pipelineFragmentShadingRate{ pipelineFragmentShadingRate_ }
      , primitiveFragmentShadingRate{ primitiveFragmentShadingRate_ }
      , attachmentFragmentShadingRate{ attachmentFragmentShadingRate_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the equivalent C struct; relies on this wrapper being
    // layout-compatible with VkPhysicalDeviceFragmentShadingRateFeaturesKHR.
    PhysicalDeviceFragmentShadingRateFeaturesKHR( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentShadingRateFeaturesKHR( *reinterpret_cast<PhysicalDeviceFragmentShadingRateFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the equivalent C struct (see layout note on the converting constructor).
    PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceFragmentShadingRateFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR &
      setPipelineFragmentShadingRate( Bool32 pipelineFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineFragmentShadingRate = pipelineFragmentShadingRate_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR &
      setPrimitiveFragmentShadingRate( Bool32 primitiveFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
    {
      primitiveFragmentShadingRate = primitiveFragmentShadingRate_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR &
      setAttachmentFragmentShadingRate( Bool32 attachmentFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentFragmentShadingRate = attachmentFragmentShadingRate_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, so this wrapper can be passed
    // directly to the C API (by reference or by pointer).
    operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceFragmentShadingRateFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; used by the comparison operators below when reflection is enabled.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pipelineFragmentShadingRate, primitiveFragmentShadingRate, attachmentFragmentShadingRate );
    }
#endif

    // Memberwise comparison: defaulted <=> where available, otherwise hand-written == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFragmentShadingRateFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineFragmentShadingRate == rhs.pipelineFragmentShadingRate ) &&
             ( primitiveFragmentShadingRate == rhs.primitiveFragmentShadingRate ) && ( attachmentFragmentShadingRate == rhs.attachmentFragmentShadingRate );
#  endif
    }

    bool operator!=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                         = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR;
    void *        pNext                         = {};
    Bool32        pipelineFragmentShadingRate   = {};
    Bool32        primitiveFragmentShadingRate  = {};
    Bool32        attachmentFragmentShadingRate = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper type (only provided for C++20 and newer).
  template <>
  struct CppType<VkPhysicalDeviceFragmentShadingRateFeaturesKHR>
  {
    using Type = PhysicalDeviceFragmentShadingRateFeaturesKHR;
  };
#endif

  // Maps the StructureType enumerant to the C++ struct carrying that sType value.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR>
  {
    using Type = PhysicalDeviceFragmentShadingRateFeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceFragmentShadingRateKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShadingRateKHR.html
  struct PhysicalDeviceFragmentShadingRateKHR
  {
    using NativeType = VkPhysicalDeviceFragmentShadingRateKHR;

    // Compile-time metadata: the sType value identifying this struct, and whether
    // more than one instance may appear in a pNext chain (allowDuplicate).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceFragmentShadingRateKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Elementwise constructor; sType is fixed by its member initializer below.
    // Note: no setters are generated for this struct.
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceFragmentShadingRateKHR( SampleCountFlags sampleCounts_ = {}, Extent2D fragmentSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , sampleCounts{ sampleCounts_ }
      , fragmentSize{ fragmentSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the equivalent C struct; relies on this wrapper being
    // layout-compatible with VkPhysicalDeviceFragmentShadingRateKHR.
    PhysicalDeviceFragmentShadingRateKHR( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentShadingRateKHR( *reinterpret_cast<PhysicalDeviceFragmentShadingRateKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceFragmentShadingRateKHR & operator=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the equivalent C struct (see layout note on the converting constructor).
    PhysicalDeviceFragmentShadingRateKHR & operator=( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceFragmentShadingRateKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type, so this wrapper can be passed
    // directly to the C API (by reference or by pointer).
    operator VkPhysicalDeviceFragmentShadingRateKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateKHR *>( this );
    }

    operator VkPhysicalDeviceFragmentShadingRateKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( this );
    }

    operator VkPhysicalDeviceFragmentShadingRateKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateKHR *>( this );
    }

    operator VkPhysicalDeviceFragmentShadingRateKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members; used by the comparison operators below when reflection is enabled.
    std::tuple<StructureType const &, void * const &, SampleCountFlags const &, Extent2D const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, sampleCounts, fragmentSize );
    }
#endif

    // Memberwise comparison: defaulted <=> where available, otherwise hand-written == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFragmentShadingRateKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleCounts == rhs.sampleCounts ) && ( fragmentSize == rhs.fragmentSize );
#  endif
    }

    bool operator!=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType    sType        = StructureType::ePhysicalDeviceFragmentShadingRateKHR;
    void *           pNext        = {};
    SampleCountFlags sampleCounts = {};
    Extent2D         fragmentSize = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper type (only provided for C++20 and newer).
  template <>
  struct CppType<VkPhysicalDeviceFragmentShadingRateKHR>
  {
    using Type = PhysicalDeviceFragmentShadingRateKHR;
  };
#endif

  // Maps the StructureType enumerant to the C++ struct carrying that sType value.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateKHR>
  {
    using Type = PhysicalDeviceFragmentShadingRateKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceFragmentShadingRatePropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShadingRatePropertiesKHR.html
  struct PhysicalDeviceFragmentShadingRatePropertiesKHR
  {
    using NativeType = VkPhysicalDeviceFragmentShadingRatePropertiesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceFragmentShadingRatePropertiesKHR( Extent2D            minFragmentShadingRateAttachmentTexelSize_            = {},
                                                      Extent2D            maxFragmentShadingRateAttachmentTexelSize_            = {},
                                                      uint32_t            maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ = {},
                                                      Bool32              primitiveFragmentShadingRateWithMultipleViewports_    = {},
                                                      Bool32              layeredShadingRateAttachments_                        = {},
                                                      Bool32              fragmentShadingRateNonTrivialCombinerOps_             = {},
                                                      Extent2D            maxFragmentSize_                                      = {},
                                                      uint32_t            maxFragmentSizeAspectRatio_                           = {},
                                                      uint32_t            maxFragmentShadingRateCoverageSamples_                = {},
                                                      SampleCountFlagBits maxFragmentShadingRateRasterizationSamples_           = SampleCountFlagBits::e1,
                                                      Bool32              fragmentShadingRateWithShaderDepthStencilWrites_      = {},
                                                      Bool32              fragmentShadingRateWithSampleMask_                    = {},
                                                      Bool32              fragmentShadingRateWithShaderSampleMask_              = {},
                                                      Bool32              fragmentShadingRateWithConservativeRasterization_     = {},
                                                      Bool32              fragmentShadingRateWithFragmentShaderInterlock_       = {},
                                                      Bool32              fragmentShadingRateWithCustomSampleLocations_         = {},
                                                      Bool32              fragmentShadingRateStrictMultiplyCombiner_            = {},
                                                      void *              pNext_                                                = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , minFragmentShadingRateAttachmentTexelSize{ minFragmentShadingRateAttachmentTexelSize_ }
      , maxFragmentShadingRateAttachmentTexelSize{ maxFragmentShadingRateAttachmentTexelSize_ }
      , maxFragmentShadingRateAttachmentTexelSizeAspectRatio{ maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ }
      , primitiveFragmentShadingRateWithMultipleViewports{ primitiveFragmentShadingRateWithMultipleViewports_ }
      , layeredShadingRateAttachments{ layeredShadingRateAttachments_ }
      , fragmentShadingRateNonTrivialCombinerOps{ fragmentShadingRateNonTrivialCombinerOps_ }
      , maxFragmentSize{ maxFragmentSize_ }
      , maxFragmentSizeAspectRatio{ maxFragmentSizeAspectRatio_ }
      , maxFragmentShadingRateCoverageSamples{ maxFragmentShadingRateCoverageSamples_ }
      , maxFragmentShadingRateRasterizationSamples{ maxFragmentShadingRateRasterizationSamples_ }
      , fragmentShadingRateWithShaderDepthStencilWrites{ fragmentShadingRateWithShaderDepthStencilWrites_ }
      , fragmentShadingRateWithSampleMask{ fragmentShadingRateWithSampleMask_ }
      , fragmentShadingRateWithShaderSampleMask{ fragmentShadingRateWithShaderSampleMask_ }
      , fragmentShadingRateWithConservativeRasterization{ fragmentShadingRateWithConservativeRasterization_ }
      , fragmentShadingRateWithFragmentShaderInterlock{ fragmentShadingRateWithFragmentShaderInterlock_ }
      , fragmentShadingRateWithCustomSampleLocations{ fragmentShadingRateWithCustomSampleLocations_ }
      , fragmentShadingRateStrictMultiplyCombiner{ fragmentShadingRateStrictMultiplyCombiner_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceFragmentShadingRatePropertiesKHR( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShadingRatePropertiesKHR( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentShadingRatePropertiesKHR( *reinterpret_cast<PhysicalDeviceFragmentShadingRatePropertiesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceFragmentShadingRatePropertiesKHR const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRatePropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRatePropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceFragmentShadingRatePropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceFragmentShadingRatePropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &,
               void * const &,
               Extent2D const &,
               Extent2D const &,
               uint32_t const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Extent2D const &,
               uint32_t const &,
               uint32_t const &,
               SampleCountFlagBits const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       minFragmentShadingRateAttachmentTexelSize,
                       maxFragmentShadingRateAttachmentTexelSize,
                       maxFragmentShadingRateAttachmentTexelSizeAspectRatio,
                       primitiveFragmentShadingRateWithMultipleViewports,
                       layeredShadingRateAttachments,
                       fragmentShadingRateNonTrivialCombinerOps,
                       maxFragmentSize,
                       maxFragmentSizeAspectRatio,
                       maxFragmentShadingRateCoverageSamples,
                       maxFragmentShadingRateRasterizationSamples,
                       fragmentShadingRateWithShaderDepthStencilWrites,
                       fragmentShadingRateWithSampleMask,
                       fragmentShadingRateWithShaderSampleMask,
                       fragmentShadingRateWithConservativeRasterization,
                       fragmentShadingRateWithFragmentShaderInterlock,
                       fragmentShadingRateWithCustomSampleLocations,
                       fragmentShadingRateStrictMultiplyCombiner );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20 path: defaulted three-way comparison yields all six relational operators,
    // comparing members in declaration order.
    auto operator<=>( PhysicalDeviceFragmentShadingRatePropertiesKHR const & ) const = default;
#else
    // Pre-C++20 fallback: hand-rolled member-wise equality.
    bool operator==( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      // std::tuple::operator== performs the member-wise comparison for us.
      return this->reflect() == rhs.reflect();
#  else
      // Member-wise comparison in declaration order; pNext is compared as a raw pointer value.
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( minFragmentShadingRateAttachmentTexelSize == rhs.minFragmentShadingRateAttachmentTexelSize ) &&
             ( maxFragmentShadingRateAttachmentTexelSize == rhs.maxFragmentShadingRateAttachmentTexelSize ) &&
             ( maxFragmentShadingRateAttachmentTexelSizeAspectRatio == rhs.maxFragmentShadingRateAttachmentTexelSizeAspectRatio ) &&
             ( primitiveFragmentShadingRateWithMultipleViewports == rhs.primitiveFragmentShadingRateWithMultipleViewports ) &&
             ( layeredShadingRateAttachments == rhs.layeredShadingRateAttachments ) &&
             ( fragmentShadingRateNonTrivialCombinerOps == rhs.fragmentShadingRateNonTrivialCombinerOps ) && ( maxFragmentSize == rhs.maxFragmentSize ) &&
             ( maxFragmentSizeAspectRatio == rhs.maxFragmentSizeAspectRatio ) &&
             ( maxFragmentShadingRateCoverageSamples == rhs.maxFragmentShadingRateCoverageSamples ) &&
             ( maxFragmentShadingRateRasterizationSamples == rhs.maxFragmentShadingRateRasterizationSamples ) &&
             ( fragmentShadingRateWithShaderDepthStencilWrites == rhs.fragmentShadingRateWithShaderDepthStencilWrites ) &&
             ( fragmentShadingRateWithSampleMask == rhs.fragmentShadingRateWithSampleMask ) &&
             ( fragmentShadingRateWithShaderSampleMask == rhs.fragmentShadingRateWithShaderSampleMask ) &&
             ( fragmentShadingRateWithConservativeRasterization == rhs.fragmentShadingRateWithConservativeRasterization ) &&
             ( fragmentShadingRateWithFragmentShaderInterlock == rhs.fragmentShadingRateWithFragmentShaderInterlock ) &&
             ( fragmentShadingRateWithCustomSampleLocations == rhs.fragmentShadingRateWithCustomSampleLocations ) &&
             ( fragmentShadingRateStrictMultiplyCombiner == rhs.fragmentShadingRateStrictMultiplyCombiner );
#  endif
    }

    bool operator!=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceFragmentShadingRatePropertiesKHR in the same order;
    // the reinterpret_cast conversions in this struct depend on that layout (generated code).
    StructureType       sType                                                = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR;
    void *              pNext                                                = {};
    Extent2D            minFragmentShadingRateAttachmentTexelSize            = {};
    Extent2D            maxFragmentShadingRateAttachmentTexelSize            = {};
    uint32_t            maxFragmentShadingRateAttachmentTexelSizeAspectRatio = {};
    Bool32              primitiveFragmentShadingRateWithMultipleViewports    = {};
    Bool32              layeredShadingRateAttachments                        = {};
    Bool32              fragmentShadingRateNonTrivialCombinerOps             = {};
    Extent2D            maxFragmentSize                                      = {};
    uint32_t            maxFragmentSizeAspectRatio                           = {};
    uint32_t            maxFragmentShadingRateCoverageSamples                = {};
    SampleCountFlagBits maxFragmentShadingRateRasterizationSamples           = SampleCountFlagBits::e1;
    Bool32              fragmentShadingRateWithShaderDepthStencilWrites      = {};
    Bool32              fragmentShadingRateWithSampleMask                    = {};
    Bool32              fragmentShadingRateWithShaderSampleMask              = {};
    Bool32              fragmentShadingRateWithConservativeRasterization     = {};
    Bool32              fragmentShadingRateWithFragmentShaderInterlock       = {};
    Bool32              fragmentShadingRateWithCustomSampleLocations         = {};
    Bool32              fragmentShadingRateStrictMultiplyCombiner            = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper type (C++20-only trait).
  template <>
  struct CppType<VkPhysicalDeviceFragmentShadingRatePropertiesKHR>
  {
    using Type = PhysicalDeviceFragmentShadingRatePropertiesKHR;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR>
  {
    using Type = PhysicalDeviceFragmentShadingRatePropertiesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceFrameBoundaryFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFrameBoundaryFeaturesEXT.html
  struct PhysicalDeviceFrameBoundaryFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceFrameBoundaryFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    // Compile-time StructureType enumerant matching the sType member below.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceFrameBoundaryFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every field defaults to zero / null.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFrameBoundaryFeaturesEXT( Bool32 frameBoundary_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , frameBoundary{ frameBoundary_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFrameBoundaryFeaturesEXT( PhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on identical memory layout (this header is generated to guarantee it).
    PhysicalDeviceFrameBoundaryFeaturesEXT( VkPhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFrameBoundaryFeaturesEXT( *reinterpret_cast<PhysicalDeviceFrameBoundaryFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceFrameBoundaryFeaturesEXT & operator=( PhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout assumption as the converting constructor).
    PhysicalDeviceFrameBoundaryFeaturesEXT & operator=( VkPhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceFrameBoundaryFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFrameBoundaryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFrameBoundaryFeaturesEXT & setFrameBoundary( Bool32 frameBoundary_ ) VULKAN_HPP_NOEXCEPT
    {
      frameBoundary = frameBoundary_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the underlying C struct, by reference and by pointer.
    operator VkPhysicalDeviceFrameBoundaryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFrameBoundaryFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceFrameBoundaryFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFrameBoundaryFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceFrameBoundaryFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceFrameBoundaryFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceFrameBoundaryFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceFrameBoundaryFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Member-wise reflection tuple (const references, declaration order).
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, frameBoundary );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison generates member-wise relational operators.
    auto operator<=>( PhysicalDeviceFrameBoundaryFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( frameBoundary == rhs.frameBoundary );
#  endif
    }

    bool operator!=( PhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceFrameBoundaryFeaturesEXT in the same order.
    StructureType sType         = StructureType::ePhysicalDeviceFrameBoundaryFeaturesEXT;
    void *        pNext         = {};
    Bool32        frameBoundary = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper type (C++20-only trait).
  template <>
  struct CppType<VkPhysicalDeviceFrameBoundaryFeaturesEXT>
  {
    using Type = PhysicalDeviceFrameBoundaryFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFrameBoundaryFeaturesEXT>
  {
    using Type = PhysicalDeviceFrameBoundaryFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceGlobalPriorityQueryFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceGlobalPriorityQueryFeatures.html
  struct PhysicalDeviceGlobalPriorityQueryFeatures
  {
    using NativeType = VkPhysicalDeviceGlobalPriorityQueryFeatures;

    static const bool                                  allowDuplicate = false;
    // Compile-time StructureType enumerant matching the sType member below.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceGlobalPriorityQueryFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every field defaults to zero / null.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeatures( Bool32 globalPriorityQuery_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , globalPriorityQuery{ globalPriorityQuery_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeatures( PhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on identical memory layout (this header is generated to guarantee it).
    PhysicalDeviceGlobalPriorityQueryFeatures( VkPhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceGlobalPriorityQueryFeatures( *reinterpret_cast<PhysicalDeviceGlobalPriorityQueryFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceGlobalPriorityQueryFeatures & operator=( PhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout assumption as the converting constructor).
    PhysicalDeviceGlobalPriorityQueryFeatures & operator=( VkPhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceGlobalPriorityQueryFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeatures & setGlobalPriorityQuery( Bool32 globalPriorityQuery_ ) VULKAN_HPP_NOEXCEPT
    {
      globalPriorityQuery = globalPriorityQuery_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the underlying C struct, by reference and by pointer.
    operator VkPhysicalDeviceGlobalPriorityQueryFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceGlobalPriorityQueryFeatures *>( this );
    }

    operator VkPhysicalDeviceGlobalPriorityQueryFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceGlobalPriorityQueryFeatures *>( this );
    }

    operator VkPhysicalDeviceGlobalPriorityQueryFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceGlobalPriorityQueryFeatures *>( this );
    }

    operator VkPhysicalDeviceGlobalPriorityQueryFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceGlobalPriorityQueryFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Member-wise reflection tuple (const references, declaration order).
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, globalPriorityQuery );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison generates member-wise relational operators.
    auto operator<=>( PhysicalDeviceGlobalPriorityQueryFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriorityQuery == rhs.globalPriorityQuery );
#  endif
    }

    bool operator!=( PhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceGlobalPriorityQueryFeatures in the same order.
    StructureType sType               = StructureType::ePhysicalDeviceGlobalPriorityQueryFeatures;
    void *        pNext               = {};
    Bool32        globalPriorityQuery = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper type (C++20-only trait).
  template <>
  struct CppType<VkPhysicalDeviceGlobalPriorityQueryFeatures>
  {
    using Type = PhysicalDeviceGlobalPriorityQueryFeatures;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceGlobalPriorityQueryFeatures>
  {
    using Type = PhysicalDeviceGlobalPriorityQueryFeatures;
  };

  // Aliases retaining the extension-suffixed names for backward compatibility.
  using PhysicalDeviceGlobalPriorityQueryFeaturesEXT = PhysicalDeviceGlobalPriorityQueryFeatures;
  using PhysicalDeviceGlobalPriorityQueryFeaturesKHR = PhysicalDeviceGlobalPriorityQueryFeatures;

  // wrapper struct for struct VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT.html
  struct PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    // Compile-time StructureType enumerant matching the sType member below.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every field defaults to zero / null.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( Bool32 graphicsPipelineLibrary_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , graphicsPipelineLibrary{ graphicsPipelineLibrary_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on identical memory layout (this header is generated to guarantee it).
    PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( *reinterpret_cast<PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & operator=( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout assumption as the converting constructor).
    PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & operator=( VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & setGraphicsPipelineLibrary( Bool32 graphicsPipelineLibrary_ ) VULKAN_HPP_NOEXCEPT
    {
      graphicsPipelineLibrary = graphicsPipelineLibrary_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the underlying C struct, by reference and by pointer.
    operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Member-wise reflection tuple (const references, declaration order).
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, graphicsPipelineLibrary );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison generates member-wise relational operators.
    auto operator<=>( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( graphicsPipelineLibrary == rhs.graphicsPipelineLibrary );
#  endif
    }

    bool operator!=( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT in the same order.
    StructureType sType                   = StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT;
    void *        pNext                   = {};
    Bool32        graphicsPipelineLibrary = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper type (C++20-only trait).
  template <>
  struct CppType<VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT>
  {
    using Type = PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT>
  {
    using Type = PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT.html
  // Note: unlike the Features structs, no setter methods are provided; properties
  // structs are presumably output-only (filled in by the implementation) — the
  // generator omits the setter section here.
  struct PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;

    static const bool                                  allowDuplicate = false;
    // Compile-time StructureType enumerant matching the sType member below.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every field defaults to zero / null.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( Bool32 graphicsPipelineLibraryFastLinking_                        = {},
                                                                             Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration_ = {},
                                                                             void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , graphicsPipelineLibraryFastLinking{ graphicsPipelineLibraryFastLinking_ }
      , graphicsPipelineLibraryIndependentInterpolationDecoration{ graphicsPipelineLibraryIndependentInterpolationDecoration_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on identical memory layout (this header is generated to guarantee it).
    PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( *reinterpret_cast<PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT &
      operator=( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout assumption as the converting constructor).
    PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & operator=( VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the underlying C struct, by reference and by pointer.
    operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Member-wise reflection tuple (const references, declaration order).
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, graphicsPipelineLibraryFastLinking, graphicsPipelineLibraryIndependentInterpolationDecoration );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison generates member-wise relational operators.
    auto operator<=>( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( graphicsPipelineLibraryFastLinking == rhs.graphicsPipelineLibraryFastLinking ) &&
             ( graphicsPipelineLibraryIndependentInterpolationDecoration == rhs.graphicsPipelineLibraryIndependentInterpolationDecoration );
#  endif
    }

    bool operator!=( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT in the same order.
    StructureType sType                                                     = StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;
    void *        pNext                                                     = {};
    Bool32        graphicsPipelineLibraryFastLinking                        = {};
    Bool32        graphicsPipelineLibraryIndependentInterpolationDecoration = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper type (C++20-only trait).
  template <>
  struct CppType<VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT>
  {
    using Type = PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT>
  {
    using Type = PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceGroupProperties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceGroupProperties.html
  struct PhysicalDeviceGroupProperties
  {
    using NativeType = VkPhysicalDeviceGroupProperties;

    static const bool                                  allowDuplicate = false;
    // Compile-time StructureType enumerant matching the sType member below.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceGroupProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; the fixed-size handle array is copied from the given std::array.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( uint32_t                                                     physicalDeviceCount_ = {},
                                                           std::array<PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE> const & physicalDevices_     = {},
                                                           Bool32                                                       subsetAllocation_    = {},
                                                           void *                                                       pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , physicalDeviceCount{ physicalDeviceCount_ }
      , physicalDevices{ physicalDevices_ }
      , subsetAllocation{ subsetAllocation_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on identical memory layout (this header is generated to guarantee it).
    PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceGroupProperties( *reinterpret_cast<PhysicalDeviceGroupProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceGroupProperties & operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout assumption as the converting constructor).
    PhysicalDeviceGroupProperties & operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceGroupProperties const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the underlying C struct, by reference and by pointer.
    operator VkPhysicalDeviceGroupProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceGroupProperties *>( this );
    }

    operator VkPhysicalDeviceGroupProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceGroupProperties *>( this );
    }

    operator VkPhysicalDeviceGroupProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceGroupProperties *>( this );
    }

    operator VkPhysicalDeviceGroupProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceGroupProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Member-wise reflection tuple (const references, declaration order).
    std::tuple<StructureType const &, void * const &, uint32_t const &, ArrayWrapper1D<PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE> const &, Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, physicalDeviceCount, physicalDevices, subsetAllocation );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written (not defaulted) three-way comparison: only the first physicalDeviceCount
    // entries of physicalDevices participate, so unused trailing slots never affect the result.
    std::strong_ordering operator<=>( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = physicalDeviceCount <=> rhs.physicalDeviceCount; cmp != 0 )
        return cmp;
      for ( size_t i = 0; i < physicalDeviceCount; ++i )
      {
        if ( auto cmp = physicalDevices[i] <=> rhs.physicalDevices[i]; cmp != 0 )
          return cmp;
      }
      if ( auto cmp = subsetAllocation <=> rhs.subsetAllocation; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    // Equality is defined unconditionally (not only when <=> is unavailable) and likewise
    // considers only the first physicalDeviceCount handles, compared bytewise via memcmp.
    bool operator==( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) &&
             ( memcmp( physicalDevices, rhs.physicalDevices, physicalDeviceCount * sizeof( PhysicalDevice ) ) == 0 ) &&
             ( subsetAllocation == rhs.subsetAllocation );
    }

    bool operator!=( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Members mirror VkPhysicalDeviceGroupProperties in the same order.
    StructureType                                            sType               = StructureType::ePhysicalDeviceGroupProperties;
    void *                                                   pNext               = {};
    uint32_t                                                 physicalDeviceCount = {};
    ArrayWrapper1D<PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE> physicalDevices     = {};
    Bool32                                                   subsetAllocation    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper type (C++20-only trait).
  template <>
  struct CppType<VkPhysicalDeviceGroupProperties>
  {
    using Type = PhysicalDeviceGroupProperties;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceGroupProperties>
  {
    using Type = PhysicalDeviceGroupProperties;
  };

  // Alias retaining the extension-suffixed name for backward compatibility.
  using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties;

  // wrapper struct for struct VkPhysicalDeviceHdrVividFeaturesHUAWEI, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceHdrVividFeaturesHUAWEI.html
  struct PhysicalDeviceHdrVividFeaturesHUAWEI
  {
    using NativeType = VkPhysicalDeviceHdrVividFeaturesHUAWEI;

    static const bool                                  allowDuplicate = false;
    // Compile-time StructureType enumerant matching the sType member below.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceHdrVividFeaturesHUAWEI;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; both fields default to zero / null.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceHdrVividFeaturesHUAWEI( Bool32 hdrVivid_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , hdrVivid( hdrVivid_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceHdrVividFeaturesHUAWEI( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by viewing it through the layout-compatible C++ wrapper.
    PhysicalDeviceHdrVividFeaturesHUAWEI( VkPhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceHdrVividFeaturesHUAWEI( *reinterpret_cast<PhysicalDeviceHdrVividFeaturesHUAWEI const *>( &rhs ) )
    {
    }

    PhysicalDeviceHdrVividFeaturesHUAWEI & operator=( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct, again via the layout-compatible view.
    PhysicalDeviceHdrVividFeaturesHUAWEI & operator=( VkPhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      auto const & wrapped = *reinterpret_cast<PhysicalDeviceHdrVividFeaturesHUAWEI const *>( &rhs );
      *this = wrapped;
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHdrVividFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHdrVividFeaturesHUAWEI & setHdrVivid( Bool32 hdrVivid_ ) VULKAN_HPP_NOEXCEPT
    {
      this->hdrVivid = hdrVivid_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the underlying C struct, by reference and by pointer.
    operator VkPhysicalDeviceHdrVividFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceHdrVividFeaturesHUAWEI *>( this );
    }

    operator VkPhysicalDeviceHdrVividFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceHdrVividFeaturesHUAWEI *>( this );
    }

    operator VkPhysicalDeviceHdrVividFeaturesHUAWEI const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceHdrVividFeaturesHUAWEI *>( this );
    }

    operator VkPhysicalDeviceHdrVividFeaturesHUAWEI *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceHdrVividFeaturesHUAWEI *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Member-wise reflection tuple (const references, declaration order).
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, hdrVivid );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison generates member-wise relational operators.
    auto operator<=>( PhysicalDeviceHdrVividFeaturesHUAWEI const & ) const = default;
#else
    bool operator==( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( rhs.sType == sType ) && ( rhs.pNext == pNext ) && ( rhs.hdrVivid == hdrVivid );
#  endif
    }

    bool operator!=( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceHdrVividFeaturesHUAWEI in the same order.
    StructureType sType    = StructureType::ePhysicalDeviceHdrVividFeaturesHUAWEI;
    void *        pNext    = {};
    Bool32        hdrVivid = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan struct to its C++ wrapper type (C++20 and newer).
  template <>
  struct CppType<VkPhysicalDeviceHdrVividFeaturesHUAWEI>
  {
    using Type = PhysicalDeviceHdrVividFeaturesHUAWEI;
  };
#endif

  // Maps the StructureType enumerant to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceHdrVividFeaturesHUAWEI>
  {
    using Type = PhysicalDeviceHdrVividFeaturesHUAWEI;
  };

  // wrapper struct for struct VkPhysicalDeviceHostImageCopyFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceHostImageCopyFeatures.html
  struct PhysicalDeviceHostImageCopyFeatures
  {
    using NativeType = VkPhysicalDeviceHostImageCopyFeatures;

    // This struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The sType value identifying this struct in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceHostImageCopyFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer below.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceHostImageCopyFeatures( Bool32 hostImageCopy_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , hostImageCopy{ hostImageCopy_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceHostImageCopyFeatures( PhysicalDeviceHostImageCopyFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on this wrapper being layout-compatible with it.
    PhysicalDeviceHostImageCopyFeatures( VkPhysicalDeviceHostImageCopyFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceHostImageCopyFeatures( *reinterpret_cast<PhysicalDeviceHostImageCopyFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceHostImageCopyFeatures & operator=( PhysicalDeviceHostImageCopyFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-compatible reinterpret_cast).
    PhysicalDeviceHostImageCopyFeatures & operator=( VkPhysicalDeviceHostImageCopyFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceHostImageCopyFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow call chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyFeatures & setHostImageCopy( Bool32 hostImageCopy_ ) VULKAN_HPP_NOEXCEPT
    {
      hostImageCopy = hostImageCopy_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type, by reference and by pointer.
    operator VkPhysicalDeviceHostImageCopyFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceHostImageCopyFeatures *>( this );
    }

    operator VkPhysicalDeviceHostImageCopyFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceHostImageCopyFeatures *>( this );
    }

    operator VkPhysicalDeviceHostImageCopyFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceHostImageCopyFeatures *>( this );
    }

    operator VkPhysicalDeviceHostImageCopyFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceHostImageCopyFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, hostImageCopy );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceHostImageCopyFeatures const & ) const = default;
#else
    // Member-wise equality, including sType and pNext.
    bool operator==( PhysicalDeviceHostImageCopyFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hostImageCopy == rhs.hostImageCopy );
#  endif
    }

    bool operator!=( PhysicalDeviceHostImageCopyFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType         = StructureType::ePhysicalDeviceHostImageCopyFeatures;
    void *        pNext         = {};
    Bool32        hostImageCopy = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan struct to its C++ wrapper type (C++20 and newer).
  template <>
  struct CppType<VkPhysicalDeviceHostImageCopyFeatures>
  {
    using Type = PhysicalDeviceHostImageCopyFeatures;
  };
#endif

  // Maps the StructureType enumerant to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceHostImageCopyFeatures>
  {
    using Type = PhysicalDeviceHostImageCopyFeatures;
  };

  // Backward-compatible alias for the EXT-suffixed struct name.
  using PhysicalDeviceHostImageCopyFeaturesEXT = PhysicalDeviceHostImageCopyFeatures;

  // wrapper struct for struct VkPhysicalDeviceHostImageCopyProperties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceHostImageCopyProperties.html
  struct PhysicalDeviceHostImageCopyProperties
  {
    using NativeType = VkPhysicalDeviceHostImageCopyProperties;

    // This struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The sType value identifying this struct in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceHostImageCopyProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer below.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties( uint32_t                                  copySrcLayoutCount_              = {},
                                                                   ImageLayout *                             pCopySrcLayouts_                 = {},
                                                                   uint32_t                                  copyDstLayoutCount_              = {},
                                                                   ImageLayout *                             pCopyDstLayouts_                 = {},
                                                                   std::array<uint8_t, VK_UUID_SIZE> const & optimalTilingLayoutUUID_         = {},
                                                                   Bool32                                    identicalMemoryTypeRequirements_ = {},
                                                                   void *                                    pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , copySrcLayoutCount{ copySrcLayoutCount_ }
      , pCopySrcLayouts{ pCopySrcLayouts_ }
      , copyDstLayoutCount{ copyDstLayoutCount_ }
      , pCopyDstLayouts{ pCopyDstLayouts_ }
      , optimalTilingLayoutUUID{ optimalTilingLayoutUUID_ }
      , identicalMemoryTypeRequirements{ identicalMemoryTypeRequirements_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties( PhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on this wrapper being layout-compatible with it.
    PhysicalDeviceHostImageCopyProperties( VkPhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceHostImageCopyProperties( *reinterpret_cast<PhysicalDeviceHostImageCopyProperties const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced constructor: derives each count/pointer pair from an array proxy.
    // The proxies must outlive this struct, since only their pointers are stored.
    PhysicalDeviceHostImageCopyProperties( ArrayProxyNoTemporaries<ImageLayout> const & copySrcLayouts_,
                                           ArrayProxyNoTemporaries<ImageLayout> const & copyDstLayouts_                  = {},
                                           std::array<uint8_t, VK_UUID_SIZE> const &    optimalTilingLayoutUUID_         = {},
                                           Bool32                                       identicalMemoryTypeRequirements_ = {},
                                           void *                                       pNext_                           = nullptr )
      : pNext( pNext_ )
      , copySrcLayoutCount( static_cast<uint32_t>( copySrcLayouts_.size() ) )
      , pCopySrcLayouts( copySrcLayouts_.data() )
      , copyDstLayoutCount( static_cast<uint32_t>( copyDstLayouts_.size() ) )
      , pCopyDstLayouts( copyDstLayouts_.data() )
      , optimalTilingLayoutUUID( optimalTilingLayoutUUID_ )
      , identicalMemoryTypeRequirements( identicalMemoryTypeRequirements_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PhysicalDeviceHostImageCopyProperties & operator=( PhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-compatible reinterpret_cast).
    PhysicalDeviceHostImageCopyProperties & operator=( VkPhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceHostImageCopyProperties const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow call chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setCopySrcLayoutCount( uint32_t copySrcLayoutCount_ ) VULKAN_HPP_NOEXCEPT
    {
      copySrcLayoutCount = copySrcLayoutCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setPCopySrcLayouts( ImageLayout * pCopySrcLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      pCopySrcLayouts = pCopySrcLayouts_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Proxy setter: updates the count and pointer from one array proxy.
    PhysicalDeviceHostImageCopyProperties & setCopySrcLayouts( ArrayProxyNoTemporaries<ImageLayout> const & copySrcLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      copySrcLayoutCount = static_cast<uint32_t>( copySrcLayouts_.size() );
      pCopySrcLayouts    = copySrcLayouts_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setCopyDstLayoutCount( uint32_t copyDstLayoutCount_ ) VULKAN_HPP_NOEXCEPT
    {
      copyDstLayoutCount = copyDstLayoutCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setPCopyDstLayouts( ImageLayout * pCopyDstLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      pCopyDstLayouts = pCopyDstLayouts_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Proxy setter: updates the count and pointer from one array proxy.
    PhysicalDeviceHostImageCopyProperties & setCopyDstLayouts( ArrayProxyNoTemporaries<ImageLayout> const & copyDstLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      copyDstLayoutCount = static_cast<uint32_t>( copyDstLayouts_.size() );
      pCopyDstLayouts    = copyDstLayouts_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties &
      setOptimalTilingLayoutUUID( std::array<uint8_t, VK_UUID_SIZE> optimalTilingLayoutUUID_ ) VULKAN_HPP_NOEXCEPT
    {
      optimalTilingLayoutUUID = optimalTilingLayoutUUID_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties &
      setIdenticalMemoryTypeRequirements( Bool32 identicalMemoryTypeRequirements_ ) VULKAN_HPP_NOEXCEPT
    {
      identicalMemoryTypeRequirements = identicalMemoryTypeRequirements_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type, by reference and by pointer.
    operator VkPhysicalDeviceHostImageCopyProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceHostImageCopyProperties *>( this );
    }

    operator VkPhysicalDeviceHostImageCopyProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceHostImageCopyProperties *>( this );
    }

    operator VkPhysicalDeviceHostImageCopyProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceHostImageCopyProperties *>( this );
    }

    operator VkPhysicalDeviceHostImageCopyProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceHostImageCopyProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for comparison below.
    std::tuple<StructureType const &,
               void * const &,
               uint32_t const &,
               ImageLayout * const &,
               uint32_t const &,
               ImageLayout * const &,
               ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(
        sType, pNext, copySrcLayoutCount, pCopySrcLayouts, copyDstLayoutCount, pCopyDstLayouts, optimalTilingLayoutUUID, identicalMemoryTypeRequirements );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceHostImageCopyProperties const & ) const = default;
#else
    // Member-wise equality; note the pointer members are compared by address, not by pointee.
    bool operator==( PhysicalDeviceHostImageCopyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( copySrcLayoutCount == rhs.copySrcLayoutCount ) &&
             ( pCopySrcLayouts == rhs.pCopySrcLayouts ) && ( copyDstLayoutCount == rhs.copyDstLayoutCount ) && ( pCopyDstLayouts == rhs.pCopyDstLayouts ) &&
             ( optimalTilingLayoutUUID == rhs.optimalTilingLayoutUUID ) && ( identicalMemoryTypeRequirements == rhs.identicalMemoryTypeRequirements );
#  endif
    }

    bool operator!=( PhysicalDeviceHostImageCopyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                         sType                           = StructureType::ePhysicalDeviceHostImageCopyProperties;
    void *                                pNext                           = {};
    uint32_t                              copySrcLayoutCount              = {};
    ImageLayout *                         pCopySrcLayouts                 = {};
    uint32_t                              copyDstLayoutCount              = {};
    ImageLayout *                         pCopyDstLayouts                 = {};
    ArrayWrapper1D<uint8_t, VK_UUID_SIZE> optimalTilingLayoutUUID         = {};
    Bool32                                identicalMemoryTypeRequirements = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan struct to its C++ wrapper type (C++20 and newer).
  template <>
  struct CppType<VkPhysicalDeviceHostImageCopyProperties>
  {
    using Type = PhysicalDeviceHostImageCopyProperties;
  };
#endif

  // Maps the StructureType enumerant to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceHostImageCopyProperties>
  {
    using Type = PhysicalDeviceHostImageCopyProperties;
  };

  // Backward-compatible alias for the EXT-suffixed struct name.
  using PhysicalDeviceHostImageCopyPropertiesEXT = PhysicalDeviceHostImageCopyProperties;

  // wrapper struct for struct VkPhysicalDeviceHostQueryResetFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceHostQueryResetFeatures.html
  struct PhysicalDeviceHostQueryResetFeatures
  {
    using NativeType = VkPhysicalDeviceHostQueryResetFeatures;

    // This struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The sType value identifying this struct in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceHostQueryResetFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer below.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( Bool32 hostQueryReset_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , hostQueryReset{ hostQueryReset_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on this wrapper being layout-compatible with it.
    PhysicalDeviceHostQueryResetFeatures( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceHostQueryResetFeatures( *reinterpret_cast<PhysicalDeviceHostQueryResetFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceHostQueryResetFeatures & operator=( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-compatible reinterpret_cast).
    PhysicalDeviceHostQueryResetFeatures & operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceHostQueryResetFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow call chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & setHostQueryReset( Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
    {
      hostQueryReset = hostQueryReset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type, by reference and by pointer.
    operator VkPhysicalDeviceHostQueryResetFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceHostQueryResetFeatures *>( this );
    }

    operator VkPhysicalDeviceHostQueryResetFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceHostQueryResetFeatures *>( this );
    }

    operator VkPhysicalDeviceHostQueryResetFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceHostQueryResetFeatures *>( this );
    }

    operator VkPhysicalDeviceHostQueryResetFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceHostQueryResetFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, hostQueryReset );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceHostQueryResetFeatures const & ) const = default;
#else
    // Member-wise equality, including sType and pNext.
    bool operator==( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hostQueryReset == rhs.hostQueryReset );
#  endif
    }

    bool operator!=( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType          = StructureType::ePhysicalDeviceHostQueryResetFeatures;
    void *        pNext          = {};
    Bool32        hostQueryReset = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan struct to its C++ wrapper type (C++20 and newer).
  template <>
  struct CppType<VkPhysicalDeviceHostQueryResetFeatures>
  {
    using Type = PhysicalDeviceHostQueryResetFeatures;
  };
#endif

  // Maps the StructureType enumerant to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceHostQueryResetFeatures>
  {
    using Type = PhysicalDeviceHostQueryResetFeatures;
  };

  // Backward-compatible alias for the EXT-suffixed struct name.
  using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures;

  // wrapper struct for struct VkPhysicalDeviceIDProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceIDProperties.html
  struct PhysicalDeviceIDProperties
  {
    using NativeType = VkPhysicalDeviceIDProperties;

    // This struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The sType value identifying this struct in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceIdProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer below.
    // Note: no setters are generated for this struct.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( std::array<uint8_t, VK_UUID_SIZE> const & deviceUUID_      = {},
                                                        std::array<uint8_t, VK_UUID_SIZE> const & driverUUID_      = {},
                                                        std::array<uint8_t, VK_LUID_SIZE> const & deviceLUID_      = {},
                                                        uint32_t                                  deviceNodeMask_  = {},
                                                        Bool32                                    deviceLUIDValid_ = {},
                                                        void *                                    pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , deviceUUID{ deviceUUID_ }
      , driverUUID{ driverUUID_ }
      , deviceLUID{ deviceLUID_ }
      , deviceNodeMask{ deviceNodeMask_ }
      , deviceLUIDValid{ deviceLUIDValid_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on this wrapper being layout-compatible with it.
    PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceIDProperties( *reinterpret_cast<PhysicalDeviceIDProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceIDProperties & operator=( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-compatible reinterpret_cast).
    PhysicalDeviceIDProperties & operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceIDProperties const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native Vulkan type, by reference and by pointer.
    operator VkPhysicalDeviceIDProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceIDProperties *>( this );
    }

    operator VkPhysicalDeviceIDProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceIDProperties *>( this );
    }

    operator VkPhysicalDeviceIDProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceIDProperties *>( this );
    }

    operator VkPhysicalDeviceIDProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceIDProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for comparison below.
    std::tuple<StructureType const &,
               void * const &,
               ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &,
               ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &,
               ArrayWrapper1D<uint8_t, VK_LUID_SIZE> const &,
               uint32_t const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, deviceUUID, driverUUID, deviceLUID, deviceNodeMask, deviceLUIDValid );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceIDProperties const & ) const = default;
#else
    // Member-wise equality, including sType and pNext.
    bool operator==( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) && ( driverUUID == rhs.driverUUID ) &&
             ( deviceLUID == rhs.deviceLUID ) && ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid );
#  endif
    }

    bool operator!=( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                         sType           = StructureType::ePhysicalDeviceIdProperties;
    void *                                pNext           = {};
    ArrayWrapper1D<uint8_t, VK_UUID_SIZE> deviceUUID      = {};
    ArrayWrapper1D<uint8_t, VK_UUID_SIZE> driverUUID      = {};
    ArrayWrapper1D<uint8_t, VK_LUID_SIZE> deviceLUID      = {};
    uint32_t                              deviceNodeMask  = {};
    Bool32                                deviceLUIDValid = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan struct to its C++ wrapper type (C++20 and newer).
  template <>
  struct CppType<VkPhysicalDeviceIDProperties>
  {
    using Type = PhysicalDeviceIDProperties;
  };
#endif

  // Maps the StructureType enumerant to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceIdProperties>
  {
    using Type = PhysicalDeviceIDProperties;
  };

  // Backward-compatible alias for the KHR-suffixed struct name.
  using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties;

  // wrapper struct for struct VkPhysicalDeviceImage2DViewOf3DFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImage2DViewOf3DFeaturesEXT.html
  struct PhysicalDeviceImage2DViewOf3DFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceImage2DViewOf3DFeaturesEXT;

    // This struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The sType value identifying this struct in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer below.
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceImage2DViewOf3DFeaturesEXT( Bool32 image2DViewOf3D_ = {}, Bool32 sampler2DViewOf3D_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , image2DViewOf3D{ image2DViewOf3D_ }
      , sampler2DViewOf3D{ sampler2DViewOf3D_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceImage2DViewOf3DFeaturesEXT( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on this wrapper being layout-compatible with it.
    PhysicalDeviceImage2DViewOf3DFeaturesEXT( VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceImage2DViewOf3DFeaturesEXT( *reinterpret_cast<PhysicalDeviceImage2DViewOf3DFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceImage2DViewOf3DFeaturesEXT & operator=( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-compatible reinterpret_cast).
    PhysicalDeviceImage2DViewOf3DFeaturesEXT & operator=( VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceImage2DViewOf3DFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow call chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & setImage2DViewOf3D( Bool32 image2DViewOf3D_ ) VULKAN_HPP_NOEXCEPT
    {
      image2DViewOf3D = image2DViewOf3D_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & setSampler2DViewOf3D( Bool32 sampler2DViewOf3D_ ) VULKAN_HPP_NOEXCEPT
    {
      sampler2DViewOf3D = sampler2DViewOf3D_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type, by reference and by pointer.
    operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceImage2DViewOf3DFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceImage2DViewOf3DFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceImage2DViewOf3DFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceImage2DViewOf3DFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, image2DViewOf3D, sampler2DViewOf3D );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & ) const = default;
#else
    // Member-wise equality, including sType and pNext.
    bool operator==( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image2DViewOf3D == rhs.image2DViewOf3D ) && ( sampler2DViewOf3D == rhs.sampler2DViewOf3D );
#  endif
    }

    bool operator!=( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType             = StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT;
    void *        pNext             = {};
    Bool32        image2DViewOf3D   = {};
    Bool32        sampler2DViewOf3D = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan struct to its C++ wrapper type (C++20 and newer).
  template <>
  struct CppType<VkPhysicalDeviceImage2DViewOf3DFeaturesEXT>
  {
    using Type = PhysicalDeviceImage2DViewOf3DFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT>
  {
    using Type = PhysicalDeviceImage2DViewOf3DFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceImageAlignmentControlFeaturesMESA, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageAlignmentControlFeaturesMESA.html
  struct PhysicalDeviceImageAlignmentControlFeaturesMESA
  {
    using NativeType = VkPhysicalDeviceImageAlignmentControlFeaturesMESA;

    // This struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The sType value identifying this struct in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceImageAlignmentControlFeaturesMESA;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer below.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageAlignmentControlFeaturesMESA( Bool32 imageAlignmentControl_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , imageAlignmentControl{ imageAlignmentControl_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceImageAlignmentControlFeaturesMESA( PhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on this wrapper being layout-compatible with it.
    PhysicalDeviceImageAlignmentControlFeaturesMESA( VkPhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceImageAlignmentControlFeaturesMESA( *reinterpret_cast<PhysicalDeviceImageAlignmentControlFeaturesMESA const *>( &rhs ) )
    {
    }

    PhysicalDeviceImageAlignmentControlFeaturesMESA & operator=( PhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-compatible reinterpret_cast).
    PhysicalDeviceImageAlignmentControlFeaturesMESA & operator=( VkPhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceImageAlignmentControlFeaturesMESA const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow call chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageAlignmentControlFeaturesMESA & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageAlignmentControlFeaturesMESA & setImageAlignmentControl( Bool32 imageAlignmentControl_ ) VULKAN_HPP_NOEXCEPT
    {
      imageAlignmentControl = imageAlignmentControl_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type, by reference and by pointer.
    operator VkPhysicalDeviceImageAlignmentControlFeaturesMESA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceImageAlignmentControlFeaturesMESA *>( this );
    }

    operator VkPhysicalDeviceImageAlignmentControlFeaturesMESA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceImageAlignmentControlFeaturesMESA *>( this );
    }

    operator VkPhysicalDeviceImageAlignmentControlFeaturesMESA const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceImageAlignmentControlFeaturesMESA *>( this );
    }

    operator VkPhysicalDeviceImageAlignmentControlFeaturesMESA *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceImageAlignmentControlFeaturesMESA *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, imageAlignmentControl );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceImageAlignmentControlFeaturesMESA const & ) const = default;
#else
    // Member-wise equality, including sType and pNext.
    bool operator==( PhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageAlignmentControl == rhs.imageAlignmentControl );
#  endif
    }

    bool operator!=( PhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                 = StructureType::ePhysicalDeviceImageAlignmentControlFeaturesMESA;
    void *        pNext                 = {};
    Bool32        imageAlignmentControl = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceImageAlignmentControlFeaturesMESA>
  {
    using Type = PhysicalDeviceImageAlignmentControlFeaturesMESA;
  };
#endif

  // Maps the StructureType enumerant to the corresponding C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceImageAlignmentControlFeaturesMESA>
  {
    using Type = PhysicalDeviceImageAlignmentControlFeaturesMESA;
  };

  // wrapper struct for struct VkPhysicalDeviceImageAlignmentControlPropertiesMESA, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageAlignmentControlPropertiesMESA.html
  // Read-only properties struct (no setters are generated for it); filled in by the implementation when chained into a properties query.
  struct PhysicalDeviceImageAlignmentControlPropertiesMESA
  {
    using NativeType = VkPhysicalDeviceImageAlignmentControlPropertiesMESA;

    // Compile-time metadata: the StructureType enumerant identifying this struct in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceImageAlignmentControlPropertiesMESA;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is fixed by its in-class initializer; only the payload and pNext are configurable.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageAlignmentControlPropertiesMESA( uint32_t supportedImageAlignmentMask_ = {},
                                                                            void *   pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , supportedImageAlignmentMask{ supportedImageAlignmentMask_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceImageAlignmentControlPropertiesMESA( PhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with the C struct.
    PhysicalDeviceImageAlignmentControlPropertiesMESA( VkPhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceImageAlignmentControlPropertiesMESA( *reinterpret_cast<PhysicalDeviceImageAlignmentControlPropertiesMESA const *>( &rhs ) )
    {
    }

    PhysicalDeviceImageAlignmentControlPropertiesMESA &
      operator=( PhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpret_cast as the converting constructor).
    PhysicalDeviceImageAlignmentControlPropertiesMESA & operator=( VkPhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceImageAlignmentControlPropertiesMESA const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkPhysicalDeviceImageAlignmentControlPropertiesMESA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceImageAlignmentControlPropertiesMESA *>( this );
    }

    operator VkPhysicalDeviceImageAlignmentControlPropertiesMESA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceImageAlignmentControlPropertiesMESA *>( this );
    }

    operator VkPhysicalDeviceImageAlignmentControlPropertiesMESA const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceImageAlignmentControlPropertiesMESA *>( this );
    }

    operator VkPhysicalDeviceImageAlignmentControlPropertiesMESA *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceImageAlignmentControlPropertiesMESA *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for member-wise comparison below.
    std::tuple<StructureType const &, void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, supportedImageAlignmentMask );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceImageAlignmentControlPropertiesMESA const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer (the chain is not followed).
    bool operator==( PhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedImageAlignmentMask == rhs.supportedImageAlignmentMask );
#  endif
    }

    bool operator!=( PhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                       = StructureType::ePhysicalDeviceImageAlignmentControlPropertiesMESA;
    void *        pNext                       = {};
    uint32_t      supportedImageAlignmentMask = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceImageAlignmentControlPropertiesMESA>
  {
    using Type = PhysicalDeviceImageAlignmentControlPropertiesMESA;
  };
#endif

  // Maps the StructureType enumerant to the corresponding C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceImageAlignmentControlPropertiesMESA>
  {
    using Type = PhysicalDeviceImageAlignmentControlPropertiesMESA;
  };

  // wrapper struct for struct VkPhysicalDeviceImageCompressionControlFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageCompressionControlFeaturesEXT.html
  struct PhysicalDeviceImageCompressionControlFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceImageCompressionControlFeaturesEXT;

    // Compile-time metadata: the StructureType enumerant identifying this struct in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is fixed by its in-class initializer; only the feature flag and pNext are configurable.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlFeaturesEXT( Bool32 imageCompressionControl_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , imageCompressionControl{ imageCompressionControl_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceImageCompressionControlFeaturesEXT( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with the C struct.
    PhysicalDeviceImageCompressionControlFeaturesEXT( VkPhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceImageCompressionControlFeaturesEXT( *reinterpret_cast<PhysicalDeviceImageCompressionControlFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceImageCompressionControlFeaturesEXT & operator=( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpret_cast as the converting constructor).
    PhysicalDeviceImageCompressionControlFeaturesEXT & operator=( VkPhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceImageCompressionControlFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable fluent setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlFeaturesEXT & setImageCompressionControl( Bool32 imageCompressionControl_ ) VULKAN_HPP_NOEXCEPT
    {
      imageCompressionControl = imageCompressionControl_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkPhysicalDeviceImageCompressionControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceImageCompressionControlFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceImageCompressionControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceImageCompressionControlFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceImageCompressionControlFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceImageCompressionControlFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceImageCompressionControlFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceImageCompressionControlFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for member-wise comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, imageCompressionControl );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceImageCompressionControlFeaturesEXT const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer (the chain is not followed).
    bool operator==( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCompressionControl == rhs.imageCompressionControl );
#  endif
    }

    bool operator!=( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                   = StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT;
    void *        pNext                   = {};
    Bool32        imageCompressionControl = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceImageCompressionControlFeaturesEXT>
  {
    using Type = PhysicalDeviceImageCompressionControlFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant to the corresponding C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT>
  {
    using Type = PhysicalDeviceImageCompressionControlFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT.html
  struct PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT;

    // Compile-time metadata: the StructureType enumerant identifying this struct in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is fixed by its in-class initializer; only the feature flag and pNext are configurable.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( Bool32 imageCompressionControlSwapchain_ = {},
                                                                                    void * pNext_                            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , imageCompressionControlSwapchain{ imageCompressionControlSwapchain_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with the C struct.
    PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT &
      operator=( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpret_cast as the converting constructor).
    PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT &
      operator=( VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable fluent setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT &
      setImageCompressionControlSwapchain( Bool32 imageCompressionControlSwapchain_ ) VULKAN_HPP_NOEXCEPT
    {
      imageCompressionControlSwapchain = imageCompressionControlSwapchain_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for member-wise comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, imageCompressionControlSwapchain );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer (the chain is not followed).
    bool operator==( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCompressionControlSwapchain == rhs.imageCompressionControlSwapchain );
#  endif
    }

    bool operator!=( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                            = StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT;
    void *        pNext                            = {};
    Bool32        imageCompressionControlSwapchain = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT>
  {
    using Type = PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant to the corresponding C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT>
  {
    using Type = PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceImageDrmFormatModifierInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageDrmFormatModifierInfoEXT.html
  struct PhysicalDeviceImageDrmFormatModifierInfoEXT
  {
    using NativeType = VkPhysicalDeviceImageDrmFormatModifierInfoEXT;

    // Compile-time metadata: the StructureType enumerant identifying this struct in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is fixed by its in-class initializer; queueFamilyIndexCount_ must match the length of the
    // pQueueFamilyIndices_ array supplied by the caller (only relevant when sharingMode_ requires it).
    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t         drmFormatModifier_     = {},
                                                                      SharingMode      sharingMode_           = SharingMode::eExclusive,
                                                                      uint32_t         queueFamilyIndexCount_ = {},
                                                                      const uint32_t * pQueueFamilyIndices_   = {},
                                                                      const void *     pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , drmFormatModifier{ drmFormatModifier_ }
      , sharingMode{ sharingMode_ }
      , queueFamilyIndexCount{ queueFamilyIndexCount_ }
      , pQueueFamilyIndices{ pQueueFamilyIndices_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with the C struct.
    PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceImageDrmFormatModifierInfoEXT( *reinterpret_cast<PhysicalDeviceImageDrmFormatModifierInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives queueFamilyIndexCount and pQueueFamilyIndices from an array proxy.
    // The proxy does not own its data; the caller must keep the index array alive while this struct is in use.
    PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t                                        drmFormatModifier_,
                                                 SharingMode                                     sharingMode_,
                                                 ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_,
                                                 const void *                                    pNext_ = nullptr )
      : pNext( pNext_ )
      , drmFormatModifier( drmFormatModifier_ )
      , sharingMode( sharingMode_ )
      , queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) )
      , pQueueFamilyIndices( queueFamilyIndices_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpret_cast as the converting constructor).
    PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceImageDrmFormatModifierInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable fluent setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
    {
      drmFormatModifier = drmFormatModifier_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setSharingMode( SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
    {
      sharingMode = sharingMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndexCount = queueFamilyIndexCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pQueueFamilyIndices = pQueueFamilyIndices_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets count and pointer together from an array proxy, keeping them consistent.
    PhysicalDeviceImageDrmFormatModifierInfoEXT &
      setQueueFamilyIndices( ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
      pQueueFamilyIndices   = queueFamilyIndices_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceImageDrmFormatModifierInfoEXT *>( this );
    }

    operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceImageDrmFormatModifierInfoEXT *>( this );
    }

    operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceImageDrmFormatModifierInfoEXT *>( this );
    }

    operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceImageDrmFormatModifierInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for member-wise comparison below.
    std::tuple<StructureType const &, const void * const &, uint64_t const &, SharingMode const &, uint32_t const &, const uint32_t * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, drmFormatModifier, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceImageDrmFormatModifierInfoEXT const & ) const = default;
#else
    // Member-wise equality; pointer members are compared by address, not by pointed-to contents.
    bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) && ( sharingMode == rhs.sharingMode ) &&
             ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
#  endif
    }

    bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType    sType                 = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;
    const void *     pNext                 = {};
    uint64_t         drmFormatModifier     = {};
    SharingMode      sharingMode           = SharingMode::eExclusive;
    uint32_t         queueFamilyIndexCount = {};
    const uint32_t * pQueueFamilyIndices   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceImageDrmFormatModifierInfoEXT>
  {
    using Type = PhysicalDeviceImageDrmFormatModifierInfoEXT;
  };
#endif

  // Maps the StructureType enumerant to the corresponding C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT>
  {
    using Type = PhysicalDeviceImageDrmFormatModifierInfoEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceImageFormatInfo2, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageFormatInfo2.html
  struct PhysicalDeviceImageFormatInfo2
  {
    using NativeType = VkPhysicalDeviceImageFormatInfo2;

    // Compile-time metadata: the StructureType enumerant identifying this struct in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceImageFormatInfo2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is fixed by its in-class initializer; all payload members and pNext are configurable.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( Format           format_ = Format::eUndefined,
                                                         ImageType        type_   = ImageType::e1D,
                                                         ImageTiling      tiling_ = ImageTiling::eOptimal,
                                                         ImageUsageFlags  usage_  = {},
                                                         ImageCreateFlags flags_  = {},
                                                         const void *     pNext_  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , format{ format_ }
      , type{ type_ }
      , tiling{ tiling_ }
      , usage{ usage_ }
      , flags{ flags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with the C struct.
    PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceImageFormatInfo2( *reinterpret_cast<PhysicalDeviceImageFormatInfo2 const *>( &rhs ) )
    {
    }

    PhysicalDeviceImageFormatInfo2 & operator=( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpret_cast as the converting constructor).
    PhysicalDeviceImageFormatInfo2 & operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceImageFormatInfo2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable fluent setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setFormat( Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setType( ImageType type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setTiling( ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
    {
      tiling = tiling_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setUsage( ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setFlags( ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkPhysicalDeviceImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( this );
    }

    operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceImageFormatInfo2 *>( this );
    }

    operator VkPhysicalDeviceImageFormatInfo2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( this );
    }

    operator VkPhysicalDeviceImageFormatInfo2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceImageFormatInfo2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for member-wise comparison below.
    std::tuple<StructureType const &,
               const void * const &,
               Format const &,
               ImageType const &,
               ImageTiling const &,
               ImageUsageFlags const &,
               ImageCreateFlags const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, format, type, tiling, usage, flags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceImageFormatInfo2 const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer (the chain is not followed).
    bool operator==( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) && ( tiling == rhs.tiling ) &&
             ( usage == rhs.usage ) && ( flags == rhs.flags );
#  endif
    }

    bool operator!=( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType    sType  = StructureType::ePhysicalDeviceImageFormatInfo2;
    const void *     pNext  = {};
    Format           format = Format::eUndefined;
    ImageType        type   = ImageType::e1D;
    ImageTiling      tiling = ImageTiling::eOptimal;
    ImageUsageFlags  usage  = {};
    ImageCreateFlags flags  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceImageFormatInfo2>
  {
    using Type = PhysicalDeviceImageFormatInfo2;
  };
#endif

  // Maps the StructureType enumerant to the corresponding C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceImageFormatInfo2>
  {
    using Type = PhysicalDeviceImageFormatInfo2;
  };

  // Backwards-compatible alias for the KHR-suffixed name of this struct.
  using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2;

  // wrapper struct for struct VkPhysicalDeviceImageProcessing2FeaturesQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageProcessing2FeaturesQCOM.html
  struct PhysicalDeviceImageProcessing2FeaturesQCOM
  {
    using NativeType = VkPhysicalDeviceImageProcessing2FeaturesQCOM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceImageProcessing2FeaturesQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessing2FeaturesQCOM( Bool32 textureBlockMatch2_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , textureBlockMatch2{ textureBlockMatch2_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessing2FeaturesQCOM( PhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceImageProcessing2FeaturesQCOM( VkPhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceImageProcessing2FeaturesQCOM( *reinterpret_cast<PhysicalDeviceImageProcessing2FeaturesQCOM const *>( &rhs ) )
    {
    }

    PhysicalDeviceImageProcessing2FeaturesQCOM & operator=( PhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct; valid because this wrapper is layout-identical to VkPhysicalDeviceImageProcessing2FeaturesQCOM.
    PhysicalDeviceImageProcessing2FeaturesQCOM & operator=( VkPhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceImageProcessing2FeaturesQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setter: installs an extension-structure chain pointer and returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessing2FeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    // Chainable setter for the textureBlockMatch2 feature flag.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessing2FeaturesQCOM & setTextureBlockMatch2( Bool32 textureBlockMatch2_ ) VULKAN_HPP_NOEXCEPT
    {
      textureBlockMatch2 = textureBlockMatch2_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the C API type (reference and pointer, const and non-const).
    operator VkPhysicalDeviceImageProcessing2FeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceImageProcessing2FeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceImageProcessing2FeaturesQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceImageProcessing2FeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceImageProcessing2FeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceImageProcessing2FeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceImageProcessing2FeaturesQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceImageProcessing2FeaturesQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used by generic comparison/hashing code.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, textureBlockMatch2 );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceImageProcessing2FeaturesQCOM const & ) const = default;
#else
    // Member-wise equality; the pNext pointer is compared by address, not by chained contents.
    bool operator==( PhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( textureBlockMatch2 == rhs.textureBlockMatch2 );
#  endif
    }

    bool operator!=( PhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType              = StructureType::ePhysicalDeviceImageProcessing2FeaturesQCOM;
    void *        pNext              = {};
    Bool32        textureBlockMatch2 = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for template code (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceImageProcessing2FeaturesQCOM>
  {
    using Type = PhysicalDeviceImageProcessing2FeaturesQCOM;
  };
#endif

  // Maps the sType enumerant to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceImageProcessing2FeaturesQCOM>
  {
    using Type = PhysicalDeviceImageProcessing2FeaturesQCOM;
  };

  // wrapper struct for struct VkPhysicalDeviceImageProcessing2PropertiesQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageProcessing2PropertiesQCOM.html
  struct PhysicalDeviceImageProcessing2PropertiesQCOM
  {
    using NativeType = VkPhysicalDeviceImageProcessing2PropertiesQCOM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceImageProcessing2PropertiesQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessing2PropertiesQCOM( Extent2D maxBlockMatchWindow_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxBlockMatchWindow{ maxBlockMatchWindow_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessing2PropertiesQCOM( PhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct; valid because the wrapper is layout-identical to VkPhysicalDeviceImageProcessing2PropertiesQCOM.
    PhysicalDeviceImageProcessing2PropertiesQCOM( VkPhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceImageProcessing2PropertiesQCOM( *reinterpret_cast<PhysicalDeviceImageProcessing2PropertiesQCOM const *>( &rhs ) )
    {
    }

    PhysicalDeviceImageProcessing2PropertiesQCOM & operator=( PhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct.
    PhysicalDeviceImageProcessing2PropertiesQCOM & operator=( VkPhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceImageProcessing2PropertiesQCOM const *>( &rhs );
      return *this;
    }

    // No member setters are generated for this struct: it is an output-only properties structure.
    // Zero-cost conversions to the C API type (reference and pointer, const and non-const).
    operator VkPhysicalDeviceImageProcessing2PropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceImageProcessing2PropertiesQCOM *>( this );
    }

    operator VkPhysicalDeviceImageProcessing2PropertiesQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceImageProcessing2PropertiesQCOM *>( this );
    }

    operator VkPhysicalDeviceImageProcessing2PropertiesQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceImageProcessing2PropertiesQCOM *>( this );
    }

    operator VkPhysicalDeviceImageProcessing2PropertiesQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceImageProcessing2PropertiesQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used by generic comparison/hashing code.
    std::tuple<StructureType const &, void * const &, Extent2D const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxBlockMatchWindow );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceImageProcessing2PropertiesQCOM const & ) const = default;
#else
    // Member-wise equality; the pNext pointer is compared by address, not by chained contents.
    bool operator==( PhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxBlockMatchWindow == rhs.maxBlockMatchWindow );
#  endif
    }

    bool operator!=( PhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType               = StructureType::ePhysicalDeviceImageProcessing2PropertiesQCOM;
    void *        pNext               = {};
    Extent2D      maxBlockMatchWindow = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for template code (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceImageProcessing2PropertiesQCOM>
  {
    using Type = PhysicalDeviceImageProcessing2PropertiesQCOM;
  };
#endif

  // Maps the sType enumerant to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceImageProcessing2PropertiesQCOM>
  {
    using Type = PhysicalDeviceImageProcessing2PropertiesQCOM;
  };

  // wrapper struct for struct VkPhysicalDeviceImageProcessingFeaturesQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageProcessingFeaturesQCOM.html
  struct PhysicalDeviceImageProcessingFeaturesQCOM
  {
    using NativeType = VkPhysicalDeviceImageProcessingFeaturesQCOM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingFeaturesQCOM( Bool32 textureSampleWeighted_ = {},
                                                                    Bool32 textureBoxFilter_      = {},
                                                                    Bool32 textureBlockMatch_     = {},
                                                                    void * pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , textureSampleWeighted{ textureSampleWeighted_ }
      , textureBoxFilter{ textureBoxFilter_ }
      , textureBlockMatch{ textureBlockMatch_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingFeaturesQCOM( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct; valid because the wrapper is layout-identical to VkPhysicalDeviceImageProcessingFeaturesQCOM.
    PhysicalDeviceImageProcessingFeaturesQCOM( VkPhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceImageProcessingFeaturesQCOM( *reinterpret_cast<PhysicalDeviceImageProcessingFeaturesQCOM const *>( &rhs ) )
    {
    }

    PhysicalDeviceImageProcessingFeaturesQCOM & operator=( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct.
    PhysicalDeviceImageProcessingFeaturesQCOM & operator=( VkPhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceImageProcessingFeaturesQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & setTextureSampleWeighted( Bool32 textureSampleWeighted_ ) VULKAN_HPP_NOEXCEPT
    {
      textureSampleWeighted = textureSampleWeighted_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & setTextureBoxFilter( Bool32 textureBoxFilter_ ) VULKAN_HPP_NOEXCEPT
    {
      textureBoxFilter = textureBoxFilter_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & setTextureBlockMatch( Bool32 textureBlockMatch_ ) VULKAN_HPP_NOEXCEPT
    {
      textureBlockMatch = textureBlockMatch_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the C API type (reference and pointer, const and non-const).
    operator VkPhysicalDeviceImageProcessingFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceImageProcessingFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceImageProcessingFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceImageProcessingFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceImageProcessingFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceImageProcessingFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceImageProcessingFeaturesQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceImageProcessingFeaturesQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used by generic comparison/hashing code.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, textureSampleWeighted, textureBoxFilter, textureBlockMatch );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceImageProcessingFeaturesQCOM const & ) const = default;
#else
    // Member-wise equality; the pNext pointer is compared by address, not by chained contents.
    bool operator==( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( textureSampleWeighted == rhs.textureSampleWeighted ) &&
             ( textureBoxFilter == rhs.textureBoxFilter ) && ( textureBlockMatch == rhs.textureBlockMatch );
#  endif
    }

    bool operator!=( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                 = StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM;
    void *        pNext                 = {};
    Bool32        textureSampleWeighted = {};
    Bool32        textureBoxFilter      = {};
    Bool32        textureBlockMatch     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for template code (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceImageProcessingFeaturesQCOM>
  {
    using Type = PhysicalDeviceImageProcessingFeaturesQCOM;
  };
#endif

  // Maps the sType enumerant to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM>
  {
    using Type = PhysicalDeviceImageProcessingFeaturesQCOM;
  };

  // wrapper struct for struct VkPhysicalDeviceImageProcessingPropertiesQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageProcessingPropertiesQCOM.html
  struct PhysicalDeviceImageProcessingPropertiesQCOM
  {
    using NativeType = VkPhysicalDeviceImageProcessingPropertiesQCOM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingPropertiesQCOM( uint32_t maxWeightFilterPhases_    = {},
                                                                      Extent2D maxWeightFilterDimension_ = {},
                                                                      Extent2D maxBlockMatchRegion_      = {},
                                                                      Extent2D maxBoxFilterBlockSize_    = {},
                                                                      void *   pNext_                    = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxWeightFilterPhases{ maxWeightFilterPhases_ }
      , maxWeightFilterDimension{ maxWeightFilterDimension_ }
      , maxBlockMatchRegion{ maxBlockMatchRegion_ }
      , maxBoxFilterBlockSize{ maxBoxFilterBlockSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingPropertiesQCOM( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct; valid because the wrapper is layout-identical to VkPhysicalDeviceImageProcessingPropertiesQCOM.
    PhysicalDeviceImageProcessingPropertiesQCOM( VkPhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceImageProcessingPropertiesQCOM( *reinterpret_cast<PhysicalDeviceImageProcessingPropertiesQCOM const *>( &rhs ) )
    {
    }

    PhysicalDeviceImageProcessingPropertiesQCOM & operator=( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct.
    PhysicalDeviceImageProcessingPropertiesQCOM & operator=( VkPhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceImageProcessingPropertiesQCOM const *>( &rhs );
      return *this;
    }

    // No member setters are generated for this struct: it is an output-only properties structure.
    // Zero-cost conversions to the C API type (reference and pointer, const and non-const).
    operator VkPhysicalDeviceImageProcessingPropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceImageProcessingPropertiesQCOM *>( this );
    }

    operator VkPhysicalDeviceImageProcessingPropertiesQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceImageProcessingPropertiesQCOM *>( this );
    }

    operator VkPhysicalDeviceImageProcessingPropertiesQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceImageProcessingPropertiesQCOM *>( this );
    }

    operator VkPhysicalDeviceImageProcessingPropertiesQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceImageProcessingPropertiesQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used by generic comparison/hashing code.
    std::tuple<StructureType const &, void * const &, uint32_t const &, Extent2D const &, Extent2D const &, Extent2D const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxWeightFilterPhases, maxWeightFilterDimension, maxBlockMatchRegion, maxBoxFilterBlockSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceImageProcessingPropertiesQCOM const & ) const = default;
#else
    // Member-wise equality; the pNext pointer is compared by address, not by chained contents.
    bool operator==( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxWeightFilterPhases == rhs.maxWeightFilterPhases ) &&
             ( maxWeightFilterDimension == rhs.maxWeightFilterDimension ) && ( maxBlockMatchRegion == rhs.maxBlockMatchRegion ) &&
             ( maxBoxFilterBlockSize == rhs.maxBoxFilterBlockSize );
#  endif
    }

    bool operator!=( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                    = StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM;
    void *        pNext                    = {};
    uint32_t      maxWeightFilterPhases    = {};
    Extent2D      maxWeightFilterDimension = {};
    Extent2D      maxBlockMatchRegion      = {};
    Extent2D      maxBoxFilterBlockSize    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for template code (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceImageProcessingPropertiesQCOM>
  {
    using Type = PhysicalDeviceImageProcessingPropertiesQCOM;
  };
#endif

  // Maps the sType enumerant to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM>
  {
    using Type = PhysicalDeviceImageProcessingPropertiesQCOM;
  };

  // wrapper struct for struct VkPhysicalDeviceImageRobustnessFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageRobustnessFeatures.html
  struct PhysicalDeviceImageRobustnessFeatures
  {
    using NativeType = VkPhysicalDeviceImageRobustnessFeatures;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceImageRobustnessFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures( Bool32 robustImageAccess_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , robustImageAccess{ robustImageAccess_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures( PhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct; valid because the wrapper is layout-identical to VkPhysicalDeviceImageRobustnessFeatures.
    PhysicalDeviceImageRobustnessFeatures( VkPhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceImageRobustnessFeatures( *reinterpret_cast<PhysicalDeviceImageRobustnessFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceImageRobustnessFeatures & operator=( PhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct.
    PhysicalDeviceImageRobustnessFeatures & operator=( VkPhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceImageRobustnessFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures & setRobustImageAccess( Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      robustImageAccess = robustImageAccess_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the C API type (reference and pointer, const and non-const).
    operator VkPhysicalDeviceImageRobustnessFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceImageRobustnessFeatures *>( this );
    }

    operator VkPhysicalDeviceImageRobustnessFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceImageRobustnessFeatures *>( this );
    }

    operator VkPhysicalDeviceImageRobustnessFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceImageRobustnessFeatures *>( this );
    }

    operator VkPhysicalDeviceImageRobustnessFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceImageRobustnessFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used by generic comparison/hashing code.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, robustImageAccess );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceImageRobustnessFeatures const & ) const = default;
#else
    // Member-wise equality; the pNext pointer is compared by address, not by chained contents.
    bool operator==( PhysicalDeviceImageRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustImageAccess == rhs.robustImageAccess );
#  endif
    }

    bool operator!=( PhysicalDeviceImageRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType             = StructureType::ePhysicalDeviceImageRobustnessFeatures;
    void *        pNext             = {};
    Bool32        robustImageAccess = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for template code (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceImageRobustnessFeatures>
  {
    using Type = PhysicalDeviceImageRobustnessFeatures;
  };
#endif

  // Maps the sType enumerant to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceImageRobustnessFeatures>
  {
    using Type = PhysicalDeviceImageRobustnessFeatures;
  };

  // Alias retained so code written against the EXT-suffixed name keeps compiling.
  using PhysicalDeviceImageRobustnessFeaturesEXT = PhysicalDeviceImageRobustnessFeatures;

  // C++ wrapper around VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT.html
  // The wrapper is layout-identical to the C struct, so values may be reinterpret_cast in either direction at no cost.
  struct PhysicalDeviceImageSlicedViewOf3DFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer on the declaration below.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageSlicedViewOf3DFeaturesEXT( Bool32 imageSlicedViewOf3D_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , imageSlicedViewOf3D( imageSlicedViewOf3D_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageSlicedViewOf3DFeaturesEXT( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct: delegates to the copy constructor through a reinterpret_cast.
    PhysicalDeviceImageSlicedViewOf3DFeaturesEXT( VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceImageSlicedViewOf3DFeaturesEXT( *reinterpret_cast<PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceImageSlicedViewOf3DFeaturesEXT & operator=( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct.
    PhysicalDeviceImageSlicedViewOf3DFeaturesEXT & operator=( VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = reinterpret_cast<PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const &>( rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageSlicedViewOf3DFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageSlicedViewOf3DFeaturesEXT & setImageSlicedViewOf3D( Bool32 imageSlicedViewOf3D_ ) VULKAN_HPP_NOEXCEPT
    {
      imageSlicedViewOf3D = imageSlicedViewOf3D_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the C API type (reference and pointer, const and non-const).
    operator VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT &>( *this );
    }

    operator VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT &>( *this );
    }

    operator VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, for generic comparison/hashing code.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, imageSlicedViewOf3D );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & ) const = default;
#else
    // Member-wise equality; pNext pointers are compared by address, not by chained contents.
    bool operator==( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( imageSlicedViewOf3D == rhs.imageSlicedViewOf3D ) && ( pNext == rhs.pNext ) && ( sType == rhs.sType );
#  endif
    }

    bool operator!=( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType               = StructureType::ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT;
    void *        pNext               = nullptr;
    Bool32        imageSlicedViewOf3D = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for template code (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT>
  {
    using Type = PhysicalDeviceImageSlicedViewOf3DFeaturesEXT;
  };
#endif

  // Maps the sType enumerant to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT>
  {
    using Type = PhysicalDeviceImageSlicedViewOf3DFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceImageViewImageFormatInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageViewImageFormatInfoEXT.html
  struct PhysicalDeviceImageViewImageFormatInfoEXT
  {
    using NativeType = VkPhysicalDeviceImageViewImageFormatInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; note that imageViewType defaults to ImageViewType::e1D, and sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( ImageViewType imageViewType_ = ImageViewType::e1D,
                                                                    void *        pNext_         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , imageViewType{ imageViewType_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct; valid because the wrapper is layout-identical to VkPhysicalDeviceImageViewImageFormatInfoEXT.
    PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceImageViewImageFormatInfoEXT( *reinterpret_cast<PhysicalDeviceImageViewImageFormatInfoEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceImageViewImageFormatInfoEXT & operator=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct.
    PhysicalDeviceImageViewImageFormatInfoEXT & operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceImageViewImageFormatInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT & setImageViewType( ImageViewType imageViewType_ ) VULKAN_HPP_NOEXCEPT
    {
      imageViewType = imageViewType_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the C API type (reference and pointer, const and non-const).
    operator VkPhysicalDeviceImageViewImageFormatInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceImageViewImageFormatInfoEXT *>( this );
    }

    operator VkPhysicalDeviceImageViewImageFormatInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceImageViewImageFormatInfoEXT *>( this );
    }

    operator VkPhysicalDeviceImageViewImageFormatInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceImageViewImageFormatInfoEXT *>( this );
    }

    operator VkPhysicalDeviceImageViewImageFormatInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceImageViewImageFormatInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used by generic comparison/hashing code.
    std::tuple<StructureType const &, void * const &, ImageViewType const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, imageViewType );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceImageViewImageFormatInfoEXT const & ) const = default;
#else
    // Member-wise equality; the pNext pointer is compared by address, not by chained contents.
    bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageViewType == rhs.imageViewType );
#  endif
    }

    bool operator!=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType         = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;
    void *        pNext         = {};
    ImageViewType imageViewType = ImageViewType::e1D;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for template code (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceImageViewImageFormatInfoEXT>
  {
    using Type = PhysicalDeviceImageViewImageFormatInfoEXT;
  };
#endif

  // Maps the sType enumerant to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT>
  {
    using Type = PhysicalDeviceImageViewImageFormatInfoEXT;
  };

  // C++ wrapper around VkPhysicalDeviceImageViewMinLodFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageViewMinLodFeaturesEXT.html
  // The wrapper is layout-identical to the C struct, so values may be reinterpret_cast in either direction at no cost.
  struct PhysicalDeviceImageViewMinLodFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceImageViewMinLodFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer on the declaration below.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewMinLodFeaturesEXT( Bool32 minLod_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , minLod( minLod_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewMinLodFeaturesEXT( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct: delegates to the copy constructor through a reinterpret_cast.
    PhysicalDeviceImageViewMinLodFeaturesEXT( VkPhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceImageViewMinLodFeaturesEXT( *reinterpret_cast<PhysicalDeviceImageViewMinLodFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceImageViewMinLodFeaturesEXT & operator=( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct.
    PhysicalDeviceImageViewMinLodFeaturesEXT & operator=( VkPhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = reinterpret_cast<PhysicalDeviceImageViewMinLodFeaturesEXT const &>( rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT & setMinLod( Bool32 minLod_ ) VULKAN_HPP_NOEXCEPT
    {
      minLod = minLod_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the C API type (reference and pointer, const and non-const).
    operator VkPhysicalDeviceImageViewMinLodFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceImageViewMinLodFeaturesEXT &>( *this );
    }

    operator VkPhysicalDeviceImageViewMinLodFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceImageViewMinLodFeaturesEXT &>( *this );
    }

    operator VkPhysicalDeviceImageViewMinLodFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceImageViewMinLodFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceImageViewMinLodFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceImageViewMinLodFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, for generic comparison/hashing code.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, minLod );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceImageViewMinLodFeaturesEXT const & ) const = default;
#else
    // Member-wise equality; pNext pointers are compared by address, not by chained contents.
    bool operator==( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( minLod == rhs.minLod ) && ( pNext == rhs.pNext ) && ( sType == rhs.sType );
#  endif
    }

    bool operator!=( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType  = StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT;
    void *        pNext  = nullptr;
    Bool32        minLod = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the C struct type to its C++ wrapper (C++20 and newer only)
  template <>
  struct CppType<VkPhysicalDeviceImageViewMinLodFeaturesEXT>
  {
    using Type = PhysicalDeviceImageViewMinLodFeaturesEXT;
  };
#endif

  // maps the StructureType enum value back to the C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT>
  {
    using Type = PhysicalDeviceImageViewMinLodFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceImagelessFramebufferFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImagelessFramebufferFeatures.html
  struct PhysicalDeviceImagelessFramebufferFeatures
  {
    using NativeType = VkPhysicalDeviceImagelessFramebufferFeatures;

    // structureType is the sType value this struct carries; allowDuplicate indicates
    // whether the struct may appear more than once in a structure chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceImagelessFramebufferFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( Bool32 imagelessFramebuffer_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , imagelessFramebuffer{ imagelessFramebuffer_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // implicit conversion from the equivalent C struct; relies on identical memory layout
    PhysicalDeviceImagelessFramebufferFeatures( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceImagelessFramebufferFeatures( *reinterpret_cast<PhysicalDeviceImagelessFramebufferFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceImagelessFramebufferFeatures & operator=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assignment from the equivalent C struct; same layout-compatibility assumption
    PhysicalDeviceImagelessFramebufferFeatures & operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceImagelessFramebufferFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters: each returns *this for fluent use
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures & setImagelessFramebuffer( Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      imagelessFramebuffer = imagelessFramebuffer_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // reinterpret this wrapper as the corresponding C struct (reference and pointer variants)
    operator VkPhysicalDeviceImagelessFramebufferFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeatures *>( this );
    }

    operator VkPhysicalDeviceImagelessFramebufferFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeatures *>( this );
    }

    operator VkPhysicalDeviceImagelessFramebufferFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeatures *>( this );
    }

    operator VkPhysicalDeviceImagelessFramebufferFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members, enabling generic (reflection-based) comparison
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, imagelessFramebuffer );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceImagelessFramebufferFeatures const & ) const = default;
#else
    // member-wise equality fallback when defaulted operator<=> is unavailable
    bool operator==( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imagelessFramebuffer == rhs.imagelessFramebuffer );
#  endif
    }

    bool operator!=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                = StructureType::ePhysicalDeviceImagelessFramebufferFeatures;
    void *        pNext                = {};
    Bool32        imagelessFramebuffer = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the C struct type to its C++ wrapper (C++20 and newer only)
  template <>
  struct CppType<VkPhysicalDeviceImagelessFramebufferFeatures>
  {
    using Type = PhysicalDeviceImagelessFramebufferFeatures;
  };
#endif

  // maps the StructureType enum value back to the C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceImagelessFramebufferFeatures>
  {
    using Type = PhysicalDeviceImagelessFramebufferFeatures;
  };

  // extension-suffixed alias for the same struct
  using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures;

  // wrapper struct for struct VkPhysicalDeviceIndexTypeUint8Features, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceIndexTypeUint8Features.html
  struct PhysicalDeviceIndexTypeUint8Features
  {
    using NativeType = VkPhysicalDeviceIndexTypeUint8Features;

    // structureType is the sType value this struct carries; allowDuplicate indicates
    // whether the struct may appear more than once in a structure chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceIndexTypeUint8Features;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8Features( Bool32 indexTypeUint8_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , indexTypeUint8{ indexTypeUint8_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8Features( PhysicalDeviceIndexTypeUint8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // implicit conversion from the equivalent C struct; relies on identical memory layout
    PhysicalDeviceIndexTypeUint8Features( VkPhysicalDeviceIndexTypeUint8Features const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceIndexTypeUint8Features( *reinterpret_cast<PhysicalDeviceIndexTypeUint8Features const *>( &rhs ) )
    {
    }

    PhysicalDeviceIndexTypeUint8Features & operator=( PhysicalDeviceIndexTypeUint8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assignment from the equivalent C struct; same layout-compatibility assumption
    PhysicalDeviceIndexTypeUint8Features & operator=( VkPhysicalDeviceIndexTypeUint8Features const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceIndexTypeUint8Features const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters: each returns *this for fluent use
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8Features & setIndexTypeUint8( Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT
    {
      indexTypeUint8 = indexTypeUint8_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // reinterpret this wrapper as the corresponding C struct (reference and pointer variants)
    operator VkPhysicalDeviceIndexTypeUint8Features const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceIndexTypeUint8Features *>( this );
    }

    operator VkPhysicalDeviceIndexTypeUint8Features &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceIndexTypeUint8Features *>( this );
    }

    operator VkPhysicalDeviceIndexTypeUint8Features const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceIndexTypeUint8Features *>( this );
    }

    operator VkPhysicalDeviceIndexTypeUint8Features *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceIndexTypeUint8Features *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members, enabling generic (reflection-based) comparison
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, indexTypeUint8 );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceIndexTypeUint8Features const & ) const = default;
#else
    // member-wise equality fallback when defaulted operator<=> is unavailable
    bool operator==( PhysicalDeviceIndexTypeUint8Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indexTypeUint8 == rhs.indexTypeUint8 );
#  endif
    }

    bool operator!=( PhysicalDeviceIndexTypeUint8Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType          = StructureType::ePhysicalDeviceIndexTypeUint8Features;
    void *        pNext          = {};
    Bool32        indexTypeUint8 = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the C struct type to its C++ wrapper (C++20 and newer only)
  template <>
  struct CppType<VkPhysicalDeviceIndexTypeUint8Features>
  {
    using Type = PhysicalDeviceIndexTypeUint8Features;
  };
#endif

  // maps the StructureType enum value back to the C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceIndexTypeUint8Features>
  {
    using Type = PhysicalDeviceIndexTypeUint8Features;
  };

  // extension-suffixed aliases for the same struct
  using PhysicalDeviceIndexTypeUint8FeaturesEXT = PhysicalDeviceIndexTypeUint8Features;
  using PhysicalDeviceIndexTypeUint8FeaturesKHR = PhysicalDeviceIndexTypeUint8Features;

  // wrapper struct for struct VkPhysicalDeviceInheritedViewportScissorFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceInheritedViewportScissorFeaturesNV.html
  struct PhysicalDeviceInheritedViewportScissorFeaturesNV
  {
    using NativeType = VkPhysicalDeviceInheritedViewportScissorFeaturesNV;

    // structureType is the sType value this struct carries; allowDuplicate indicates
    // whether the struct may appear more than once in a structure chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV( Bool32 inheritedViewportScissor2D_ = {},
                                                                           void * pNext_                      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , inheritedViewportScissor2D{ inheritedViewportScissor2D_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceInheritedViewportScissorFeaturesNV( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // implicit conversion from the equivalent C struct; relies on identical memory layout
    PhysicalDeviceInheritedViewportScissorFeaturesNV( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceInheritedViewportScissorFeaturesNV( *reinterpret_cast<PhysicalDeviceInheritedViewportScissorFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceInheritedViewportScissorFeaturesNV & operator=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assignment from the equivalent C struct; same layout-compatibility assumption
    PhysicalDeviceInheritedViewportScissorFeaturesNV & operator=( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceInheritedViewportScissorFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters: each returns *this for fluent use
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV &
      setInheritedViewportScissor2D( Bool32 inheritedViewportScissor2D_ ) VULKAN_HPP_NOEXCEPT
    {
      inheritedViewportScissor2D = inheritedViewportScissor2D_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // reinterpret this wrapper as the corresponding C struct (reference and pointer variants)
    operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceInheritedViewportScissorFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceInheritedViewportScissorFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceInheritedViewportScissorFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceInheritedViewportScissorFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members, enabling generic (reflection-based) comparison
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, inheritedViewportScissor2D );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceInheritedViewportScissorFeaturesNV const & ) const = default;
#else
    // member-wise equality fallback when defaulted operator<=> is unavailable
    bool operator==( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( inheritedViewportScissor2D == rhs.inheritedViewportScissor2D );
#  endif
    }

    bool operator!=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                      = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV;
    void *        pNext                      = {};
    Bool32        inheritedViewportScissor2D = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the C struct type to its C++ wrapper (C++20 and newer only)
  template <>
  struct CppType<VkPhysicalDeviceInheritedViewportScissorFeaturesNV>
  {
    using Type = PhysicalDeviceInheritedViewportScissorFeaturesNV;
  };
#endif

  // maps the StructureType enum value back to the C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV>
  {
    using Type = PhysicalDeviceInheritedViewportScissorFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceInlineUniformBlockFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceInlineUniformBlockFeatures.html
  struct PhysicalDeviceInlineUniformBlockFeatures
  {
    using NativeType = VkPhysicalDeviceInlineUniformBlockFeatures;

    // structureType is the sType value this struct carries; allowDuplicate indicates
    // whether the struct may appear more than once in a structure chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceInlineUniformBlockFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures( Bool32 inlineUniformBlock_                                 = {},
                                                                   Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {},
                                                                   void * pNext_                                              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , inlineUniformBlock{ inlineUniformBlock_ }
      , descriptorBindingInlineUniformBlockUpdateAfterBind{ descriptorBindingInlineUniformBlockUpdateAfterBind_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // implicit conversion from the equivalent C struct; relies on identical memory layout
    PhysicalDeviceInlineUniformBlockFeatures( VkPhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceInlineUniformBlockFeatures( *reinterpret_cast<PhysicalDeviceInlineUniformBlockFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceInlineUniformBlockFeatures & operator=( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assignment from the equivalent C struct; same layout-compatibility assumption
    PhysicalDeviceInlineUniformBlockFeatures & operator=( VkPhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceInlineUniformBlockFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters: each returns *this for fluent use
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setInlineUniformBlock( Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT
    {
      inlineUniformBlock = inlineUniformBlock_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures &
      setDescriptorBindingInlineUniformBlockUpdateAfterBind( Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // reinterpret this wrapper as the corresponding C struct (reference and pointer variants)
    operator VkPhysicalDeviceInlineUniformBlockFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockFeatures *>( this );
    }

    operator VkPhysicalDeviceInlineUniformBlockFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockFeatures *>( this );
    }

    operator VkPhysicalDeviceInlineUniformBlockFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockFeatures *>( this );
    }

    operator VkPhysicalDeviceInlineUniformBlockFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceInlineUniformBlockFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members, enabling generic (reflection-based) comparison
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, inlineUniformBlock, descriptorBindingInlineUniformBlockUpdateAfterBind );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceInlineUniformBlockFeatures const & ) const = default;
#else
    // member-wise equality fallback when defaulted operator<=> is unavailable
    bool operator==( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( inlineUniformBlock == rhs.inlineUniformBlock ) &&
             ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind );
#  endif
    }

    bool operator!=( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                                              = StructureType::ePhysicalDeviceInlineUniformBlockFeatures;
    void *        pNext                                              = {};
    Bool32        inlineUniformBlock                                 = {};
    Bool32        descriptorBindingInlineUniformBlockUpdateAfterBind = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the C struct type to its C++ wrapper (C++20 and newer only)
  template <>
  struct CppType<VkPhysicalDeviceInlineUniformBlockFeatures>
  {
    using Type = PhysicalDeviceInlineUniformBlockFeatures;
  };
#endif

  // maps the StructureType enum value back to the C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceInlineUniformBlockFeatures>
  {
    using Type = PhysicalDeviceInlineUniformBlockFeatures;
  };

  // extension-suffixed alias for the same struct
  using PhysicalDeviceInlineUniformBlockFeaturesEXT = PhysicalDeviceInlineUniformBlockFeatures;

  // wrapper struct for struct VkPhysicalDeviceInlineUniformBlockProperties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceInlineUniformBlockProperties.html
  struct PhysicalDeviceInlineUniformBlockProperties
  {
    using NativeType = VkPhysicalDeviceInlineUniformBlockProperties;

    // structureType is the sType value this struct carries; allowDuplicate indicates
    // whether the struct may appear more than once in a structure chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceInlineUniformBlockProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties( uint32_t maxInlineUniformBlockSize_                               = {},
                                                                     uint32_t maxPerStageDescriptorInlineUniformBlocks_                = {},
                                                                     uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {},
                                                                     uint32_t maxDescriptorSetInlineUniformBlocks_                     = {},
                                                                     uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_      = {},
                                                                     void *   pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxInlineUniformBlockSize{ maxInlineUniformBlockSize_ }
      , maxPerStageDescriptorInlineUniformBlocks{ maxPerStageDescriptorInlineUniformBlocks_ }
      , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks{ maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ }
      , maxDescriptorSetInlineUniformBlocks{ maxDescriptorSetInlineUniformBlocks_ }
      , maxDescriptorSetUpdateAfterBindInlineUniformBlocks{ maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties( PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // implicit conversion from the equivalent C struct; relies on identical memory layout
    PhysicalDeviceInlineUniformBlockProperties( VkPhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceInlineUniformBlockProperties( *reinterpret_cast<PhysicalDeviceInlineUniformBlockProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceInlineUniformBlockProperties & operator=( PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assignment from the equivalent C struct; same layout-compatibility assumption.
    // note: unlike the feature structs above, no setters are generated for this struct
    PhysicalDeviceInlineUniformBlockProperties & operator=( VkPhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceInlineUniformBlockProperties const *>( &rhs );
      return *this;
    }

    // reinterpret this wrapper as the corresponding C struct (reference and pointer variants)
    operator VkPhysicalDeviceInlineUniformBlockProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockProperties *>( this );
    }

    operator VkPhysicalDeviceInlineUniformBlockProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockProperties *>( this );
    }

    operator VkPhysicalDeviceInlineUniformBlockProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockProperties *>( this );
    }

    operator VkPhysicalDeviceInlineUniformBlockProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceInlineUniformBlockProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members, enabling generic (reflection-based) comparison
    std::tuple<StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       maxInlineUniformBlockSize,
                       maxPerStageDescriptorInlineUniformBlocks,
                       maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks,
                       maxDescriptorSetInlineUniformBlocks,
                       maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceInlineUniformBlockProperties const & ) const = default;
#else
    // member-wise equality fallback when defaulted operator<=> is unavailable
    bool operator==( PhysicalDeviceInlineUniformBlockProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize ) &&
             ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks ) &&
             ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) &&
             ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks ) &&
             ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
#  endif
    }

    bool operator!=( PhysicalDeviceInlineUniformBlockProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                                                   = StructureType::ePhysicalDeviceInlineUniformBlockProperties;
    void *        pNext                                                   = {};
    uint32_t      maxInlineUniformBlockSize                               = {};
    uint32_t      maxPerStageDescriptorInlineUniformBlocks                = {};
    uint32_t      maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {};
    uint32_t      maxDescriptorSetInlineUniformBlocks                     = {};
    uint32_t      maxDescriptorSetUpdateAfterBindInlineUniformBlocks      = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the C struct type to its C++ wrapper (C++20 and newer only)
  template <>
  struct CppType<VkPhysicalDeviceInlineUniformBlockProperties>
  {
    using Type = PhysicalDeviceInlineUniformBlockProperties;
  };
#endif

  // maps the StructureType enum value back to the C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceInlineUniformBlockProperties>
  {
    using Type = PhysicalDeviceInlineUniformBlockProperties;
  };

  // extension-suffixed alias for the same struct
  using PhysicalDeviceInlineUniformBlockPropertiesEXT = PhysicalDeviceInlineUniformBlockProperties;

  // wrapper struct for struct VkPhysicalDeviceInvocationMaskFeaturesHUAWEI, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceInvocationMaskFeaturesHUAWEI.html
  struct PhysicalDeviceInvocationMaskFeaturesHUAWEI
  {
    using NativeType = VkPhysicalDeviceInvocationMaskFeaturesHUAWEI;

    // structureType is the sType value this struct carries; allowDuplicate indicates
    // whether the struct may appear more than once in a structure chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI( Bool32 invocationMask_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , invocationMask{ invocationMask_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // implicit conversion from the equivalent C struct; relies on identical memory layout
    PhysicalDeviceInvocationMaskFeaturesHUAWEI( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceInvocationMaskFeaturesHUAWEI( *reinterpret_cast<PhysicalDeviceInvocationMaskFeaturesHUAWEI const *>( &rhs ) )
    {
    }

    PhysicalDeviceInvocationMaskFeaturesHUAWEI & operator=( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assignment from the equivalent C struct; same layout-compatibility assumption
    PhysicalDeviceInvocationMaskFeaturesHUAWEI & operator=( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceInvocationMaskFeaturesHUAWEI const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters: each returns *this for fluent use
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI & setInvocationMask( Bool32 invocationMask_ ) VULKAN_HPP_NOEXCEPT
    {
      invocationMask = invocationMask_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // reinterpret this wrapper as the corresponding C struct (reference and pointer variants)
    operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceInvocationMaskFeaturesHUAWEI *>( this );
    }

    operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceInvocationMaskFeaturesHUAWEI *>( this );
    }

    operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceInvocationMaskFeaturesHUAWEI *>( this );
    }

    operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceInvocationMaskFeaturesHUAWEI *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members, enabling generic (reflection-based) comparison
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, invocationMask );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & ) const = default;
#else
    // member-wise equality fallback when defaulted operator<=> is unavailable
    bool operator==( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( invocationMask == rhs.invocationMask );
#  endif
    }

    bool operator!=( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType          = StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI;
    void *        pNext          = {};
    Bool32        invocationMask = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the C struct type to its C++ wrapper (C++20 and newer only)
  template <>
  struct CppType<VkPhysicalDeviceInvocationMaskFeaturesHUAWEI>
  {
    using Type = PhysicalDeviceInvocationMaskFeaturesHUAWEI;
  };
#endif

  // maps the StructureType enum value back to the C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI>
  {
    using Type = PhysicalDeviceInvocationMaskFeaturesHUAWEI;
  };

  // wrapper struct for struct VkPhysicalDeviceLayeredApiPropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLayeredApiPropertiesKHR.html
  //
  // NOTE: this wrapper must stay layout-compatible with the C struct — every C <-> C++ conversion below
  // is a plain reinterpret_cast, so member order and types exactly mirror VkPhysicalDeviceLayeredApiPropertiesKHR.
  // No setters are generated for this struct; presumably it is only filled in by the implementation — verify
  // against the registry if you need to mutate it.
  struct PhysicalDeviceLayeredApiPropertiesKHR
  {
    using NativeType = VkPhysicalDeviceLayeredApiPropertiesKHR;

    // allowDuplicate: whether this sType may appear more than once in the same pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceLayeredApiPropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer, all other fields default to zero /
    // eVulkan / nullptr.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesKHR( uint32_t                    vendorID_   = {},
                                                                   uint32_t                    deviceID_   = {},
                                                                   PhysicalDeviceLayeredApiKHR layeredAPI_ = PhysicalDeviceLayeredApiKHR::eVulkan,
                                                                   std::array<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const & deviceName_ = {},
                                                                   void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , vendorID{ vendorID_ }
      , deviceID{ deviceID_ }
      , layeredAPI{ layeredAPI_ }
      , deviceName{ deviceName_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesKHR( PhysicalDeviceLayeredApiPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct, relying on layout compatibility (reinterpret_cast + copy constructor).
    PhysicalDeviceLayeredApiPropertiesKHR( VkPhysicalDeviceLayeredApiPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceLayeredApiPropertiesKHR( *reinterpret_cast<PhysicalDeviceLayeredApiPropertiesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceLayeredApiPropertiesKHR & operator=( PhysicalDeviceLayeredApiPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; available even when the constructors above are compiled out.
    PhysicalDeviceLayeredApiPropertiesKHR & operator=( VkPhysicalDeviceLayeredApiPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceLayeredApiPropertiesKHR const *>( &rhs );
      return *this;
    }

    // Implicit reference / pointer conversions to the C type, so instances can be passed straight to the C API.
    operator VkPhysicalDeviceLayeredApiPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceLayeredApiPropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceLayeredApiPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceLayeredApiPropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceLayeredApiPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceLayeredApiPropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceLayeredApiPropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceLayeredApiPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members (including sType/pNext) as a tuple of references, used below for comparisons.
    std::tuple<StructureType const &,
               void * const &,
               uint32_t const &,
               uint32_t const &,
               PhysicalDeviceLayeredApiKHR const &,
               ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, vendorID, deviceID, layeredAPI, deviceName );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceLayeredApiPropertiesKHR const & ) const = default;
#else
    // Member-wise equality; note that pNext is compared as a raw pointer, not by chasing the chain.
    bool operator==( PhysicalDeviceLayeredApiPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) &&
             ( layeredAPI == rhs.layeredAPI ) && ( deviceName == rhs.deviceName );
#  endif
    }

    bool operator!=( PhysicalDeviceLayeredApiPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceLayeredApiPropertiesKHR one-to-one; do not reorder.
    StructureType                                          sType      = StructureType::ePhysicalDeviceLayeredApiPropertiesKHR;
    void *                                                 pNext      = {};
    uint32_t                                               vendorID   = {};
    uint32_t                                               deviceID   = {};
    PhysicalDeviceLayeredApiKHR                            layeredAPI = PhysicalDeviceLayeredApiKHR::eVulkan;
    ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> deviceName = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkPhysicalDeviceLayeredApiPropertiesKHR to its C++ wrapper (C++20 and up only).
  template <>
  struct CppType<VkPhysicalDeviceLayeredApiPropertiesKHR>
  {
    using Type = PhysicalDeviceLayeredApiPropertiesKHR;
  };
#endif

  // Maps the sType enumerant ePhysicalDeviceLayeredApiPropertiesKHR back to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceLayeredApiPropertiesKHR>
  {
    using Type = PhysicalDeviceLayeredApiPropertiesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceLayeredApiPropertiesListKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLayeredApiPropertiesListKHR.html
  //
  // Count-plus-pointer list of PhysicalDeviceLayeredApiPropertiesKHR entries. The struct does NOT own the
  // array behind pLayeredApis; callers using the ArrayProxy overloads must keep that storage alive for as
  // long as this struct is in use.
  //
  // NOTE: this wrapper must stay layout-compatible with the C struct — every C <-> C++ conversion below
  // is a plain reinterpret_cast, so member order and types exactly mirror the C definition.
  struct PhysicalDeviceLayeredApiPropertiesListKHR
  {
    using NativeType = VkPhysicalDeviceLayeredApiPropertiesListKHR;

    // allowDuplicate: whether this sType may appear more than once in the same pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceLayeredApiPropertiesListKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR( uint32_t                                layeredApiCount_ = {},
                                                                       PhysicalDeviceLayeredApiPropertiesKHR * pLayeredApis_    = {},
                                                                       void *                                  pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , layeredApiCount{ layeredApiCount_ }
      , pLayeredApis{ pLayeredApis_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR( PhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct, relying on layout compatibility (reinterpret_cast + copy constructor).
    PhysicalDeviceLayeredApiPropertiesListKHR( VkPhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceLayeredApiPropertiesListKHR( *reinterpret_cast<PhysicalDeviceLayeredApiPropertiesListKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode convenience constructor: derives count and pointer from an array proxy. Only the raw
    // pointer ( layeredApis_.data() ) is stored — the caller's array must outlive this struct.
    PhysicalDeviceLayeredApiPropertiesListKHR( ArrayProxyNoTemporaries<PhysicalDeviceLayeredApiPropertiesKHR> const & layeredApis_, void * pNext_ = nullptr )
      : pNext( pNext_ ), layeredApiCount( static_cast<uint32_t>( layeredApis_.size() ) ), pLayeredApis( layeredApis_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PhysicalDeviceLayeredApiPropertiesListKHR & operator=( PhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; available even when the constructors above are compiled out.
    PhysicalDeviceLayeredApiPropertiesListKHR & operator=( VkPhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceLayeredApiPropertiesListKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR & setLayeredApiCount( uint32_t layeredApiCount_ ) VULKAN_HPP_NOEXCEPT
    {
      layeredApiCount = layeredApiCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR &
      setPLayeredApis( PhysicalDeviceLayeredApiPropertiesKHR * pLayeredApis_ ) VULKAN_HPP_NOEXCEPT
    {
      pLayeredApis = pLayeredApis_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets count and pointer together from an array proxy; same lifetime caveat as the proxy constructor.
    PhysicalDeviceLayeredApiPropertiesListKHR &
      setLayeredApis( ArrayProxyNoTemporaries<PhysicalDeviceLayeredApiPropertiesKHR> const & layeredApis_ ) VULKAN_HPP_NOEXCEPT
    {
      layeredApiCount = static_cast<uint32_t>( layeredApis_.size() );
      pLayeredApis    = layeredApis_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference / pointer conversions to the C type, so instances can be passed straight to the C API.
    operator VkPhysicalDeviceLayeredApiPropertiesListKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceLayeredApiPropertiesListKHR *>( this );
    }

    operator VkPhysicalDeviceLayeredApiPropertiesListKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceLayeredApiPropertiesListKHR *>( this );
    }

    operator VkPhysicalDeviceLayeredApiPropertiesListKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceLayeredApiPropertiesListKHR *>( this );
    }

    operator VkPhysicalDeviceLayeredApiPropertiesListKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceLayeredApiPropertiesListKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members (including sType/pNext) as a tuple of references, used below for comparisons.
    std::tuple<StructureType const &, void * const &, uint32_t const &, PhysicalDeviceLayeredApiPropertiesKHR * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, layeredApiCount, pLayeredApis );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceLayeredApiPropertiesListKHR const & ) const = default;
#else
    // Member-wise equality; pLayeredApis is compared as a raw pointer — the pointed-to entries are NOT compared.
    bool operator==( PhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( layeredApiCount == rhs.layeredApiCount ) && ( pLayeredApis == rhs.pLayeredApis );
#  endif
    }

    bool operator!=( PhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceLayeredApiPropertiesListKHR one-to-one; do not reorder.
    StructureType                           sType           = StructureType::ePhysicalDeviceLayeredApiPropertiesListKHR;
    void *                                  pNext           = {};
    uint32_t                                layeredApiCount = {};
    PhysicalDeviceLayeredApiPropertiesKHR * pLayeredApis    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkPhysicalDeviceLayeredApiPropertiesListKHR to its C++ wrapper (C++20 and up only).
  template <>
  struct CppType<VkPhysicalDeviceLayeredApiPropertiesListKHR>
  {
    using Type = PhysicalDeviceLayeredApiPropertiesListKHR;
  };
#endif

  // Maps the sType enumerant ePhysicalDeviceLayeredApiPropertiesListKHR back to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceLayeredApiPropertiesListKHR>
  {
    using Type = PhysicalDeviceLayeredApiPropertiesListKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceLimits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLimits.html
  struct PhysicalDeviceLimits
  {
    using NativeType = VkPhysicalDeviceLimits;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( uint32_t                        maxImageDimension1D_                             = {},
                                                  uint32_t                        maxImageDimension2D_                             = {},
                                                  uint32_t                        maxImageDimension3D_                             = {},
                                                  uint32_t                        maxImageDimensionCube_                           = {},
                                                  uint32_t                        maxImageArrayLayers_                             = {},
                                                  uint32_t                        maxTexelBufferElements_                          = {},
                                                  uint32_t                        maxUniformBufferRange_                           = {},
                                                  uint32_t                        maxStorageBufferRange_                           = {},
                                                  uint32_t                        maxPushConstantsSize_                            = {},
                                                  uint32_t                        maxMemoryAllocationCount_                        = {},
                                                  uint32_t                        maxSamplerAllocationCount_                       = {},
                                                  DeviceSize                      bufferImageGranularity_                          = {},
                                                  DeviceSize                      sparseAddressSpaceSize_                          = {},
                                                  uint32_t                        maxBoundDescriptorSets_                          = {},
                                                  uint32_t                        maxPerStageDescriptorSamplers_                   = {},
                                                  uint32_t                        maxPerStageDescriptorUniformBuffers_             = {},
                                                  uint32_t                        maxPerStageDescriptorStorageBuffers_             = {},
                                                  uint32_t                        maxPerStageDescriptorSampledImages_              = {},
                                                  uint32_t                        maxPerStageDescriptorStorageImages_              = {},
                                                  uint32_t                        maxPerStageDescriptorInputAttachments_           = {},
                                                  uint32_t                        maxPerStageResources_                            = {},
                                                  uint32_t                        maxDescriptorSetSamplers_                        = {},
                                                  uint32_t                        maxDescriptorSetUniformBuffers_                  = {},
                                                  uint32_t                        maxDescriptorSetUniformBuffersDynamic_           = {},
                                                  uint32_t                        maxDescriptorSetStorageBuffers_                  = {},
                                                  uint32_t                        maxDescriptorSetStorageBuffersDynamic_           = {},
                                                  uint32_t                        maxDescriptorSetSampledImages_                   = {},
                                                  uint32_t                        maxDescriptorSetStorageImages_                   = {},
                                                  uint32_t                        maxDescriptorSetInputAttachments_                = {},
                                                  uint32_t                        maxVertexInputAttributes_                        = {},
                                                  uint32_t                        maxVertexInputBindings_                          = {},
                                                  uint32_t                        maxVertexInputAttributeOffset_                   = {},
                                                  uint32_t                        maxVertexInputBindingStride_                     = {},
                                                  uint32_t                        maxVertexOutputComponents_                       = {},
                                                  uint32_t                        maxTessellationGenerationLevel_                  = {},
                                                  uint32_t                        maxTessellationPatchSize_                        = {},
                                                  uint32_t                        maxTessellationControlPerVertexInputComponents_  = {},
                                                  uint32_t                        maxTessellationControlPerVertexOutputComponents_ = {},
                                                  uint32_t                        maxTessellationControlPerPatchOutputComponents_  = {},
                                                  uint32_t                        maxTessellationControlTotalOutputComponents_     = {},
                                                  uint32_t                        maxTessellationEvaluationInputComponents_        = {},
                                                  uint32_t                        maxTessellationEvaluationOutputComponents_       = {},
                                                  uint32_t                        maxGeometryShaderInvocations_                    = {},
                                                  uint32_t                        maxGeometryInputComponents_                      = {},
                                                  uint32_t                        maxGeometryOutputComponents_                     = {},
                                                  uint32_t                        maxGeometryOutputVertices_                       = {},
                                                  uint32_t                        maxGeometryTotalOutputComponents_                = {},
                                                  uint32_t                        maxFragmentInputComponents_                      = {},
                                                  uint32_t                        maxFragmentOutputAttachments_                    = {},
                                                  uint32_t                        maxFragmentDualSrcAttachments_                   = {},
                                                  uint32_t                        maxFragmentCombinedOutputResources_              = {},
                                                  uint32_t                        maxComputeSharedMemorySize_                      = {},
                                                  std::array<uint32_t, 3> const & maxComputeWorkGroupCount_                        = {},
                                                  uint32_t                        maxComputeWorkGroupInvocations_                  = {},
                                                  std::array<uint32_t, 3> const & maxComputeWorkGroupSize_                         = {},
                                                  uint32_t                        subPixelPrecisionBits_                           = {},
                                                  uint32_t                        subTexelPrecisionBits_                           = {},
                                                  uint32_t                        mipmapPrecisionBits_                             = {},
                                                  uint32_t                        maxDrawIndexedIndexValue_                        = {},
                                                  uint32_t                        maxDrawIndirectCount_                            = {},
                                                  float                           maxSamplerLodBias_                               = {},
                                                  float                           maxSamplerAnisotropy_                            = {},
                                                  uint32_t                        maxViewports_                                    = {},
                                                  std::array<uint32_t, 2> const & maxViewportDimensions_                           = {},
                                                  std::array<float, 2> const &    viewportBoundsRange_                             = {},
                                                  uint32_t                        viewportSubPixelBits_                            = {},
                                                  size_t                          minMemoryMapAlignment_                           = {},
                                                  DeviceSize                      minTexelBufferOffsetAlignment_                   = {},
                                                  DeviceSize                      minUniformBufferOffsetAlignment_                 = {},
                                                  DeviceSize                      minStorageBufferOffsetAlignment_                 = {},
                                                  int32_t                         minTexelOffset_                                  = {},
                                                  uint32_t                        maxTexelOffset_                                  = {},
                                                  int32_t                         minTexelGatherOffset_                            = {},
                                                  uint32_t                        maxTexelGatherOffset_                            = {},
                                                  float                           minInterpolationOffset_                          = {},
                                                  float                           maxInterpolationOffset_                          = {},
                                                  uint32_t                        subPixelInterpolationOffsetBits_                 = {},
                                                  uint32_t                        maxFramebufferWidth_                             = {},
                                                  uint32_t                        maxFramebufferHeight_                            = {},
                                                  uint32_t                        maxFramebufferLayers_                            = {},
                                                  SampleCountFlags                framebufferColorSampleCounts_                    = {},
                                                  SampleCountFlags                framebufferDepthSampleCounts_                    = {},
                                                  SampleCountFlags                framebufferStencilSampleCounts_                  = {},
                                                  SampleCountFlags                framebufferNoAttachmentsSampleCounts_            = {},
                                                  uint32_t                        maxColorAttachments_                             = {},
                                                  SampleCountFlags                sampledImageColorSampleCounts_                   = {},
                                                  SampleCountFlags                sampledImageIntegerSampleCounts_                 = {},
                                                  SampleCountFlags                sampledImageDepthSampleCounts_                   = {},
                                                  SampleCountFlags                sampledImageStencilSampleCounts_                 = {},
                                                  SampleCountFlags                storageImageSampleCounts_                        = {},
                                                  uint32_t                        maxSampleMaskWords_                              = {},
                                                  Bool32                          timestampComputeAndGraphics_                     = {},
                                                  float                           timestampPeriod_                                 = {},
                                                  uint32_t                        maxClipDistances_                                = {},
                                                  uint32_t                        maxCullDistances_                                = {},
                                                  uint32_t                        maxCombinedClipAndCullDistances_                 = {},
                                                  uint32_t                        discreteQueuePriorities_                         = {},
                                                  std::array<float, 2> const &    pointSizeRange_                                  = {},
                                                  std::array<float, 2> const &    lineWidthRange_                                  = {},
                                                  float                           pointSizeGranularity_                            = {},
                                                  float                           lineWidthGranularity_                            = {},
                                                  Bool32                          strictLines_                                     = {},
                                                  Bool32                          standardSampleLocations_                         = {},
                                                  DeviceSize                      optimalBufferCopyOffsetAlignment_                = {},
                                                  DeviceSize                      optimalBufferCopyRowPitchAlignment_              = {},
                                                  DeviceSize                      nonCoherentAtomSize_                             = {} ) VULKAN_HPP_NOEXCEPT
      : maxImageDimension1D{ maxImageDimension1D_ }
      , maxImageDimension2D{ maxImageDimension2D_ }
      , maxImageDimension3D{ maxImageDimension3D_ }
      , maxImageDimensionCube{ maxImageDimensionCube_ }
      , maxImageArrayLayers{ maxImageArrayLayers_ }
      , maxTexelBufferElements{ maxTexelBufferElements_ }
      , maxUniformBufferRange{ maxUniformBufferRange_ }
      , maxStorageBufferRange{ maxStorageBufferRange_ }
      , maxPushConstantsSize{ maxPushConstantsSize_ }
      , maxMemoryAllocationCount{ maxMemoryAllocationCount_ }
      , maxSamplerAllocationCount{ maxSamplerAllocationCount_ }
      , bufferImageGranularity{ bufferImageGranularity_ }
      , sparseAddressSpaceSize{ sparseAddressSpaceSize_ }
      , maxBoundDescriptorSets{ maxBoundDescriptorSets_ }
      , maxPerStageDescriptorSamplers{ maxPerStageDescriptorSamplers_ }
      , maxPerStageDescriptorUniformBuffers{ maxPerStageDescriptorUniformBuffers_ }
      , maxPerStageDescriptorStorageBuffers{ maxPerStageDescriptorStorageBuffers_ }
      , maxPerStageDescriptorSampledImages{ maxPerStageDescriptorSampledImages_ }
      , maxPerStageDescriptorStorageImages{ maxPerStageDescriptorStorageImages_ }
      , maxPerStageDescriptorInputAttachments{ maxPerStageDescriptorInputAttachments_ }
      , maxPerStageResources{ maxPerStageResources_ }
      , maxDescriptorSetSamplers{ maxDescriptorSetSamplers_ }
      , maxDescriptorSetUniformBuffers{ maxDescriptorSetUniformBuffers_ }
      , maxDescriptorSetUniformBuffersDynamic{ maxDescriptorSetUniformBuffersDynamic_ }
      , maxDescriptorSetStorageBuffers{ maxDescriptorSetStorageBuffers_ }
      , maxDescriptorSetStorageBuffersDynamic{ maxDescriptorSetStorageBuffersDynamic_ }
      , maxDescriptorSetSampledImages{ maxDescriptorSetSampledImages_ }
      , maxDescriptorSetStorageImages{ maxDescriptorSetStorageImages_ }
      , maxDescriptorSetInputAttachments{ maxDescriptorSetInputAttachments_ }
      , maxVertexInputAttributes{ maxVertexInputAttributes_ }
      , maxVertexInputBindings{ maxVertexInputBindings_ }
      , maxVertexInputAttributeOffset{ maxVertexInputAttributeOffset_ }
      , maxVertexInputBindingStride{ maxVertexInputBindingStride_ }
      , maxVertexOutputComponents{ maxVertexOutputComponents_ }
      , maxTessellationGenerationLevel{ maxTessellationGenerationLevel_ }
      , maxTessellationPatchSize{ maxTessellationPatchSize_ }
      , maxTessellationControlPerVertexInputComponents{ maxTessellationControlPerVertexInputComponents_ }
      , maxTessellationControlPerVertexOutputComponents{ maxTessellationControlPerVertexOutputComponents_ }
      , maxTessellationControlPerPatchOutputComponents{ maxTessellationControlPerPatchOutputComponents_ }
      , maxTessellationControlTotalOutputComponents{ maxTessellationControlTotalOutputComponents_ }
      , maxTessellationEvaluationInputComponents{ maxTessellationEvaluationInputComponents_ }
      , maxTessellationEvaluationOutputComponents{ maxTessellationEvaluationOutputComponents_ }
      , maxGeometryShaderInvocations{ maxGeometryShaderInvocations_ }
      , maxGeometryInputComponents{ maxGeometryInputComponents_ }
      , maxGeometryOutputComponents{ maxGeometryOutputComponents_ }
      , maxGeometryOutputVertices{ maxGeometryOutputVertices_ }
      , maxGeometryTotalOutputComponents{ maxGeometryTotalOutputComponents_ }
      , maxFragmentInputComponents{ maxFragmentInputComponents_ }
      , maxFragmentOutputAttachments{ maxFragmentOutputAttachments_ }
      , maxFragmentDualSrcAttachments{ maxFragmentDualSrcAttachments_ }
      , maxFragmentCombinedOutputResources{ maxFragmentCombinedOutputResources_ }
      , maxComputeSharedMemorySize{ maxComputeSharedMemorySize_ }
      , maxComputeWorkGroupCount{ maxComputeWorkGroupCount_ }
      , maxComputeWorkGroupInvocations{ maxComputeWorkGroupInvocations_ }
      , maxComputeWorkGroupSize{ maxComputeWorkGroupSize_ }
      , subPixelPrecisionBits{ subPixelPrecisionBits_ }
      , subTexelPrecisionBits{ subTexelPrecisionBits_ }
      , mipmapPrecisionBits{ mipmapPrecisionBits_ }
      , maxDrawIndexedIndexValue{ maxDrawIndexedIndexValue_ }
      , maxDrawIndirectCount{ maxDrawIndirectCount_ }
      , maxSamplerLodBias{ maxSamplerLodBias_ }
      , maxSamplerAnisotropy{ maxSamplerAnisotropy_ }
      , maxViewports{ maxViewports_ }
      , maxViewportDimensions{ maxViewportDimensions_ }
      , viewportBoundsRange{ viewportBoundsRange_ }
      , viewportSubPixelBits{ viewportSubPixelBits_ }
      , minMemoryMapAlignment{ minMemoryMapAlignment_ }
      , minTexelBufferOffsetAlignment{ minTexelBufferOffsetAlignment_ }
      , minUniformBufferOffsetAlignment{ minUniformBufferOffsetAlignment_ }
      , minStorageBufferOffsetAlignment{ minStorageBufferOffsetAlignment_ }
      , minTexelOffset{ minTexelOffset_ }
      , maxTexelOffset{ maxTexelOffset_ }
      , minTexelGatherOffset{ minTexelGatherOffset_ }
      , maxTexelGatherOffset{ maxTexelGatherOffset_ }
      , minInterpolationOffset{ minInterpolationOffset_ }
      , maxInterpolationOffset{ maxInterpolationOffset_ }
      , subPixelInterpolationOffsetBits{ subPixelInterpolationOffsetBits_ }
      , maxFramebufferWidth{ maxFramebufferWidth_ }
      , maxFramebufferHeight{ maxFramebufferHeight_ }
      , maxFramebufferLayers{ maxFramebufferLayers_ }
      , framebufferColorSampleCounts{ framebufferColorSampleCounts_ }
      , framebufferDepthSampleCounts{ framebufferDepthSampleCounts_ }
      , framebufferStencilSampleCounts{ framebufferStencilSampleCounts_ }
      , framebufferNoAttachmentsSampleCounts{ framebufferNoAttachmentsSampleCounts_ }
      , maxColorAttachments{ maxColorAttachments_ }
      , sampledImageColorSampleCounts{ sampledImageColorSampleCounts_ }
      , sampledImageIntegerSampleCounts{ sampledImageIntegerSampleCounts_ }
      , sampledImageDepthSampleCounts{ sampledImageDepthSampleCounts_ }
      , sampledImageStencilSampleCounts{ sampledImageStencilSampleCounts_ }
      , storageImageSampleCounts{ storageImageSampleCounts_ }
      , maxSampleMaskWords{ maxSampleMaskWords_ }
      , timestampComputeAndGraphics{ timestampComputeAndGraphics_ }
      , timestampPeriod{ timestampPeriod_ }
      , maxClipDistances{ maxClipDistances_ }
      , maxCullDistances{ maxCullDistances_ }
      , maxCombinedClipAndCullDistances{ maxCombinedClipAndCullDistances_ }
      , discreteQueuePriorities{ discreteQueuePriorities_ }
      , pointSizeRange{ pointSizeRange_ }
      , lineWidthRange{ lineWidthRange_ }
      , pointSizeGranularity{ pointSizeGranularity_ }
      , lineWidthGranularity{ lineWidthGranularity_ }
      , strictLines{ strictLines_ }
      , standardSampleLocations{ standardSampleLocations_ }
      , optimalBufferCopyOffsetAlignment{ optimalBufferCopyOffsetAlignment_ }
      , optimalBufferCopyRowPitchAlignment{ optimalBufferCopyRowPitchAlignment_ }
      , nonCoherentAtomSize{ nonCoherentAtomSize_ }
    {
    }

    // Copying is trivially memberwise; the defaulted operations are correct.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct: the wrapper is treated as layout-compatible
    // with VkPhysicalDeviceLimits (a generator-maintained invariant used
    // throughout this file), so reinterpreting the reference and delegating to
    // the defaulted copy constructor copies every member.
    PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceLimits( *reinterpret_cast<PhysicalDeviceLimits const *>( &rhs ) )
    {
    }

    PhysicalDeviceLimits & operator=( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct by viewing it as the wrapper type (same
    // layout-punning convention as the converting constructor above) and
    // reusing the defaulted memberwise copy assignment.
    PhysicalDeviceLimits & operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceLimits const *>( &rhs );
      return *this;
    }

    // Implicit reference conversions to the C struct, so the wrapper can be
    // passed directly wherever the C API expects a VkPhysicalDeviceLimits.
    operator VkPhysicalDeviceLimits const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceLimits *>( this );
    }

    operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceLimits *>( this );
    }

    // Implicit pointer conversions: let the wrapper object decay to a pointer
    // to the C struct for C-API calls that take VkPhysicalDeviceLimits*.
    operator VkPhysicalDeviceLimits const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceLimits *>( this );
    }

    operator VkPhysicalDeviceLimits *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceLimits *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all 106 members as a tuple of const references, in declaration
    // order.  This powers the generic tuple-based operator== below; the tuple
    // element types and their order must stay in sync with the member list of
    // this struct.
    std::tuple<uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               DeviceSize const &,
               DeviceSize const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               ArrayWrapper1D<uint32_t, 3> const &,
               uint32_t const &,
               ArrayWrapper1D<uint32_t, 3> const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               float const &,
               float const &,
               uint32_t const &,
               ArrayWrapper1D<uint32_t, 2> const &,
               ArrayWrapper1D<float, 2> const &,
               uint32_t const &,
               size_t const &,
               DeviceSize const &,
               DeviceSize const &,
               DeviceSize const &,
               int32_t const &,
               uint32_t const &,
               int32_t const &,
               uint32_t const &,
               float const &,
               float const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               SampleCountFlags const &,
               SampleCountFlags const &,
               SampleCountFlags const &,
               SampleCountFlags const &,
               uint32_t const &,
               SampleCountFlags const &,
               SampleCountFlags const &,
               SampleCountFlags const &,
               SampleCountFlags const &,
               SampleCountFlags const &,
               uint32_t const &,
               Bool32 const &,
               float const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               ArrayWrapper1D<float, 2> const &,
               ArrayWrapper1D<float, 2> const &,
               float const &,
               float const &,
               Bool32 const &,
               Bool32 const &,
               DeviceSize const &,
               DeviceSize const &,
               DeviceSize const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      // std::tie binds each member by const reference; no copies are made.
      return std::tie( maxImageDimension1D,
                       maxImageDimension2D,
                       maxImageDimension3D,
                       maxImageDimensionCube,
                       maxImageArrayLayers,
                       maxTexelBufferElements,
                       maxUniformBufferRange,
                       maxStorageBufferRange,
                       maxPushConstantsSize,
                       maxMemoryAllocationCount,
                       maxSamplerAllocationCount,
                       bufferImageGranularity,
                       sparseAddressSpaceSize,
                       maxBoundDescriptorSets,
                       maxPerStageDescriptorSamplers,
                       maxPerStageDescriptorUniformBuffers,
                       maxPerStageDescriptorStorageBuffers,
                       maxPerStageDescriptorSampledImages,
                       maxPerStageDescriptorStorageImages,
                       maxPerStageDescriptorInputAttachments,
                       maxPerStageResources,
                       maxDescriptorSetSamplers,
                       maxDescriptorSetUniformBuffers,
                       maxDescriptorSetUniformBuffersDynamic,
                       maxDescriptorSetStorageBuffers,
                       maxDescriptorSetStorageBuffersDynamic,
                       maxDescriptorSetSampledImages,
                       maxDescriptorSetStorageImages,
                       maxDescriptorSetInputAttachments,
                       maxVertexInputAttributes,
                       maxVertexInputBindings,
                       maxVertexInputAttributeOffset,
                       maxVertexInputBindingStride,
                       maxVertexOutputComponents,
                       maxTessellationGenerationLevel,
                       maxTessellationPatchSize,
                       maxTessellationControlPerVertexInputComponents,
                       maxTessellationControlPerVertexOutputComponents,
                       maxTessellationControlPerPatchOutputComponents,
                       maxTessellationControlTotalOutputComponents,
                       maxTessellationEvaluationInputComponents,
                       maxTessellationEvaluationOutputComponents,
                       maxGeometryShaderInvocations,
                       maxGeometryInputComponents,
                       maxGeometryOutputComponents,
                       maxGeometryOutputVertices,
                       maxGeometryTotalOutputComponents,
                       maxFragmentInputComponents,
                       maxFragmentOutputAttachments,
                       maxFragmentDualSrcAttachments,
                       maxFragmentCombinedOutputResources,
                       maxComputeSharedMemorySize,
                       maxComputeWorkGroupCount,
                       maxComputeWorkGroupInvocations,
                       maxComputeWorkGroupSize,
                       subPixelPrecisionBits,
                       subTexelPrecisionBits,
                       mipmapPrecisionBits,
                       maxDrawIndexedIndexValue,
                       maxDrawIndirectCount,
                       maxSamplerLodBias,
                       maxSamplerAnisotropy,
                       maxViewports,
                       maxViewportDimensions,
                       viewportBoundsRange,
                       viewportSubPixelBits,
                       minMemoryMapAlignment,
                       minTexelBufferOffsetAlignment,
                       minUniformBufferOffsetAlignment,
                       minStorageBufferOffsetAlignment,
                       minTexelOffset,
                       maxTexelOffset,
                       minTexelGatherOffset,
                       maxTexelGatherOffset,
                       minInterpolationOffset,
                       maxInterpolationOffset,
                       subPixelInterpolationOffsetBits,
                       maxFramebufferWidth,
                       maxFramebufferHeight,
                       maxFramebufferLayers,
                       framebufferColorSampleCounts,
                       framebufferDepthSampleCounts,
                       framebufferStencilSampleCounts,
                       framebufferNoAttachmentsSampleCounts,
                       maxColorAttachments,
                       sampledImageColorSampleCounts,
                       sampledImageIntegerSampleCounts,
                       sampledImageDepthSampleCounts,
                       sampledImageStencilSampleCounts,
                       storageImageSampleCounts,
                       maxSampleMaskWords,
                       timestampComputeAndGraphics,
                       timestampPeriod,
                       maxClipDistances,
                       maxCullDistances,
                       maxCombinedClipAndCullDistances,
                       discreteQueuePriorities,
                       pointSizeRange,
                       lineWidthRange,
                       pointSizeGranularity,
                       lineWidthGranularity,
                       strictLines,
                       standardSampleLocations,
                       optimalBufferCopyOffsetAlignment,
                       optimalBufferCopyRowPitchAlignment,
                       nonCoherentAtomSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison yields memberwise ==, !=, <, etc.
    auto operator<=>( PhysicalDeviceLimits const & ) const = default;
#else
    // Pre-C++20: memberwise equality, either through the reflect() tuple or an
    // explicit comparison of every member (both forms are equivalent).
    bool operator==( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( maxImageDimension1D == rhs.maxImageDimension1D ) && ( maxImageDimension2D == rhs.maxImageDimension2D ) &&
             ( maxImageDimension3D == rhs.maxImageDimension3D ) && ( maxImageDimensionCube == rhs.maxImageDimensionCube ) &&
             ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && ( maxTexelBufferElements == rhs.maxTexelBufferElements ) &&
             ( maxUniformBufferRange == rhs.maxUniformBufferRange ) && ( maxStorageBufferRange == rhs.maxStorageBufferRange ) &&
             ( maxPushConstantsSize == rhs.maxPushConstantsSize ) && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount ) &&
             ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount ) && ( bufferImageGranularity == rhs.bufferImageGranularity ) &&
             ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize ) && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets ) &&
             ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers ) &&
             ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers ) &&
             ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers ) &&
             ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages ) &&
             ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages ) &&
             ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments ) && ( maxPerStageResources == rhs.maxPerStageResources ) &&
             ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers ) && ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers ) &&
             ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic ) &&
             ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers ) &&
             ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic ) &&
             ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages ) && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages ) &&
             ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments ) && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes ) &&
             ( maxVertexInputBindings == rhs.maxVertexInputBindings ) && ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset ) &&
             ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride ) && ( maxVertexOutputComponents == rhs.maxVertexOutputComponents ) &&
             ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel ) && ( maxTessellationPatchSize == rhs.maxTessellationPatchSize ) &&
             ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents ) &&
             ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents ) &&
             ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents ) &&
             ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents ) &&
             ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents ) &&
             ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents ) &&
             ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations ) && ( maxGeometryInputComponents == rhs.maxGeometryInputComponents ) &&
             ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents ) && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices ) &&
             ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents ) && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents ) &&
             ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments ) && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments ) &&
             ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources ) &&
             ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize ) && ( maxComputeWorkGroupCount == rhs.maxComputeWorkGroupCount ) &&
             ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations ) && ( maxComputeWorkGroupSize == rhs.maxComputeWorkGroupSize ) &&
             ( subPixelPrecisionBits == rhs.subPixelPrecisionBits ) && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits ) &&
             ( mipmapPrecisionBits == rhs.mipmapPrecisionBits ) && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue ) &&
             ( maxDrawIndirectCount == rhs.maxDrawIndirectCount ) && ( maxSamplerLodBias == rhs.maxSamplerLodBias ) &&
             ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy ) && ( maxViewports == rhs.maxViewports ) &&
             ( maxViewportDimensions == rhs.maxViewportDimensions ) && ( viewportBoundsRange == rhs.viewportBoundsRange ) &&
             ( viewportSubPixelBits == rhs.viewportSubPixelBits ) && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment ) &&
             ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment ) &&
             ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment ) &&
             ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment ) && ( minTexelOffset == rhs.minTexelOffset ) &&
             ( maxTexelOffset == rhs.maxTexelOffset ) && ( minTexelGatherOffset == rhs.minTexelGatherOffset ) &&
             ( maxTexelGatherOffset == rhs.maxTexelGatherOffset ) && ( minInterpolationOffset == rhs.minInterpolationOffset ) &&
             ( maxInterpolationOffset == rhs.maxInterpolationOffset ) && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits ) &&
             ( maxFramebufferWidth == rhs.maxFramebufferWidth ) && ( maxFramebufferHeight == rhs.maxFramebufferHeight ) &&
             ( maxFramebufferLayers == rhs.maxFramebufferLayers ) && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts ) &&
             ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts ) && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts ) &&
             ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts ) && ( maxColorAttachments == rhs.maxColorAttachments ) &&
             ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts ) &&
             ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts ) &&
             ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts ) &&
             ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts ) && ( storageImageSampleCounts == rhs.storageImageSampleCounts ) &&
             ( maxSampleMaskWords == rhs.maxSampleMaskWords ) && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics ) &&
             ( timestampPeriod == rhs.timestampPeriod ) && ( maxClipDistances == rhs.maxClipDistances ) && ( maxCullDistances == rhs.maxCullDistances ) &&
             ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances ) && ( discreteQueuePriorities == rhs.discreteQueuePriorities ) &&
             ( pointSizeRange == rhs.pointSizeRange ) && ( lineWidthRange == rhs.lineWidthRange ) && ( pointSizeGranularity == rhs.pointSizeGranularity ) &&
             ( lineWidthGranularity == rhs.lineWidthGranularity ) && ( strictLines == rhs.strictLines ) &&
             ( standardSampleLocations == rhs.standardSampleLocations ) && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment ) &&
             ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment ) && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize );
#  endif
    }

    bool operator!=( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t                    maxImageDimension1D                             = {};
    uint32_t                    maxImageDimension2D                             = {};
    uint32_t                    maxImageDimension3D                             = {};
    uint32_t                    maxImageDimensionCube                           = {};
    uint32_t                    maxImageArrayLayers                             = {};
    uint32_t                    maxTexelBufferElements                          = {};
    uint32_t                    maxUniformBufferRange                           = {};
    uint32_t                    maxStorageBufferRange                           = {};
    uint32_t                    maxPushConstantsSize                            = {};
    uint32_t                    maxMemoryAllocationCount                        = {};
    uint32_t                    maxSamplerAllocationCount                       = {};
    DeviceSize                  bufferImageGranularity                          = {};
    DeviceSize                  sparseAddressSpaceSize                          = {};
    uint32_t                    maxBoundDescriptorSets                          = {};
    uint32_t                    maxPerStageDescriptorSamplers                   = {};
    uint32_t                    maxPerStageDescriptorUniformBuffers             = {};
    uint32_t                    maxPerStageDescriptorStorageBuffers             = {};
    uint32_t                    maxPerStageDescriptorSampledImages              = {};
    uint32_t                    maxPerStageDescriptorStorageImages              = {};
    uint32_t                    maxPerStageDescriptorInputAttachments           = {};
    uint32_t                    maxPerStageResources                            = {};
    uint32_t                    maxDescriptorSetSamplers                        = {};
    uint32_t                    maxDescriptorSetUniformBuffers                  = {};
    uint32_t                    maxDescriptorSetUniformBuffersDynamic           = {};
    uint32_t                    maxDescriptorSetStorageBuffers                  = {};
    uint32_t                    maxDescriptorSetStorageBuffersDynamic           = {};
    uint32_t                    maxDescriptorSetSampledImages                   = {};
    uint32_t                    maxDescriptorSetStorageImages                   = {};
    uint32_t                    maxDescriptorSetInputAttachments                = {};
    uint32_t                    maxVertexInputAttributes                        = {};
    uint32_t                    maxVertexInputBindings                          = {};
    uint32_t                    maxVertexInputAttributeOffset                   = {};
    uint32_t                    maxVertexInputBindingStride                     = {};
    uint32_t                    maxVertexOutputComponents                       = {};
    uint32_t                    maxTessellationGenerationLevel                  = {};
    uint32_t                    maxTessellationPatchSize                        = {};
    uint32_t                    maxTessellationControlPerVertexInputComponents  = {};
    uint32_t                    maxTessellationControlPerVertexOutputComponents = {};
    uint32_t                    maxTessellationControlPerPatchOutputComponents  = {};
    uint32_t                    maxTessellationControlTotalOutputComponents     = {};
    uint32_t                    maxTessellationEvaluationInputComponents        = {};
    uint32_t                    maxTessellationEvaluationOutputComponents       = {};
    uint32_t                    maxGeometryShaderInvocations                    = {};
    uint32_t                    maxGeometryInputComponents                      = {};
    uint32_t                    maxGeometryOutputComponents                     = {};
    uint32_t                    maxGeometryOutputVertices                       = {};
    uint32_t                    maxGeometryTotalOutputComponents                = {};
    uint32_t                    maxFragmentInputComponents                      = {};
    uint32_t                    maxFragmentOutputAttachments                    = {};
    uint32_t                    maxFragmentDualSrcAttachments                   = {};
    uint32_t                    maxFragmentCombinedOutputResources              = {};
    uint32_t                    maxComputeSharedMemorySize                      = {};
    ArrayWrapper1D<uint32_t, 3> maxComputeWorkGroupCount                        = {};
    uint32_t                    maxComputeWorkGroupInvocations                  = {};
    ArrayWrapper1D<uint32_t, 3> maxComputeWorkGroupSize                         = {};
    uint32_t                    subPixelPrecisionBits                           = {};
    uint32_t                    subTexelPrecisionBits                           = {};
    uint32_t                    mipmapPrecisionBits                             = {};
    uint32_t                    maxDrawIndexedIndexValue                        = {};
    uint32_t                    maxDrawIndirectCount                            = {};
    float                       maxSamplerLodBias                               = {};
    float                       maxSamplerAnisotropy                            = {};
    uint32_t                    maxViewports                                    = {};
    ArrayWrapper1D<uint32_t, 2> maxViewportDimensions                           = {};
    ArrayWrapper1D<float, 2>    viewportBoundsRange                             = {};
    uint32_t                    viewportSubPixelBits                            = {};
    size_t                      minMemoryMapAlignment                           = {};
    DeviceSize                  minTexelBufferOffsetAlignment                   = {};
    DeviceSize                  minUniformBufferOffsetAlignment                 = {};
    DeviceSize                  minStorageBufferOffsetAlignment                 = {};
    int32_t                     minTexelOffset                                  = {};
    uint32_t                    maxTexelOffset                                  = {};
    int32_t                     minTexelGatherOffset                            = {};
    uint32_t                    maxTexelGatherOffset                            = {};
    float                       minInterpolationOffset                          = {};
    float                       maxInterpolationOffset                          = {};
    uint32_t                    subPixelInterpolationOffsetBits                 = {};
    uint32_t                    maxFramebufferWidth                             = {};
    uint32_t                    maxFramebufferHeight                            = {};
    uint32_t                    maxFramebufferLayers                            = {};
    SampleCountFlags            framebufferColorSampleCounts                    = {};
    SampleCountFlags            framebufferDepthSampleCounts                    = {};
    SampleCountFlags            framebufferStencilSampleCounts                  = {};
    SampleCountFlags            framebufferNoAttachmentsSampleCounts            = {};
    uint32_t                    maxColorAttachments                             = {};
    SampleCountFlags            sampledImageColorSampleCounts                   = {};
    SampleCountFlags            sampledImageIntegerSampleCounts                 = {};
    SampleCountFlags            sampledImageDepthSampleCounts                   = {};
    SampleCountFlags            sampledImageStencilSampleCounts                 = {};
    SampleCountFlags            storageImageSampleCounts                        = {};
    uint32_t                    maxSampleMaskWords                              = {};
    Bool32                      timestampComputeAndGraphics                     = {};
    float                       timestampPeriod                                 = {};
    uint32_t                    maxClipDistances                                = {};
    uint32_t                    maxCullDistances                                = {};
    uint32_t                    maxCombinedClipAndCullDistances                 = {};
    uint32_t                    discreteQueuePriorities                         = {};
    ArrayWrapper1D<float, 2>    pointSizeRange                                  = {};
    ArrayWrapper1D<float, 2>    lineWidthRange                                  = {};
    float                       pointSizeGranularity                            = {};
    float                       lineWidthGranularity                            = {};
    Bool32                      strictLines                                     = {};
    Bool32                      standardSampleLocations                         = {};
    DeviceSize                  optimalBufferCopyOffsetAlignment                = {};
    DeviceSize                  optimalBufferCopyRowPitchAlignment              = {};
    DeviceSize                  nonCoherentAtomSize                             = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and up: map the C type VkPhysicalDeviceLimits to its C++ wrapper.
  template <>
  struct CppType<VkPhysicalDeviceLimits>
  {
    using Type = PhysicalDeviceLimits;
  };
#endif

  // wrapper struct for struct VkPhysicalDeviceSparseProperties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSparseProperties.html
  struct PhysicalDeviceSparseProperties
  {
    using NativeType = VkPhysicalDeviceSparseProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: every residency flag defaults to a zero-initialized Bool32.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( Bool32 residencyStandard2DBlockShape_            = {},
                                                         Bool32 residencyStandard2DMultisampleBlockShape_ = {},
                                                         Bool32 residencyStandard3DBlockShape_            = {},
                                                         Bool32 residencyAlignedMipSize_                  = {},
                                                         Bool32 residencyNonResidentStrict_               = {} ) VULKAN_HPP_NOEXCEPT
      : residencyStandard2DBlockShape{ residencyStandard2DBlockShape_ }
      , residencyStandard2DMultisampleBlockShape{ residencyStandard2DMultisampleBlockShape_ }
      , residencyStandard3DBlockShape{ residencyStandard3DBlockShape_ }
      , residencyAlignedMipSize{ residencyAlignedMipSize_ }
      , residencyNonResidentStrict{ residencyNonResidentStrict_ }
    {
    }

    // Memberwise copy semantics are correct, so the defaults suffice.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSparseProperties & operator=( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct: the wrapper shares its memory layout, so the
    // reference can be reinterpreted and handed to the defaulted copy constructor.
    PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & native ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSparseProperties( *reinterpret_cast<PhysicalDeviceSparseProperties const *>( &native ) )
    {
    }
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct through the same layout-compatible view.
    PhysicalDeviceSparseProperties & operator=( VkPhysicalDeviceSparseProperties const & native ) VULKAN_HPP_NOEXCEPT
    {
      PhysicalDeviceSparseProperties const & wrapped = *reinterpret_cast<PhysicalDeviceSparseProperties const *>( &native );
      *this = wrapped;
      return *this;
    }

    // Reference and pointer conversions to the C struct, allowing the wrapper to
    // be passed straight to the C API.
    operator VkPhysicalDeviceSparseProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSparseProperties const *>( this );
    }

    operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSparseProperties *>( this );
    }

    operator VkPhysicalDeviceSparseProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceSparseProperties const *>( this );
    }

    operator VkPhysicalDeviceSparseProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceSparseProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All five members as a tuple of const references, in declaration order.
    std::tuple<Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( residencyStandard2DBlockShape,
                       residencyStandard2DMultisampleBlockShape,
                       residencyStandard3DBlockShape,
                       residencyAlignedMipSize,
                       residencyNonResidentStrict );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSparseProperties const & ) const = default;
#else
    // Pre-C++20 memberwise equality.
    bool operator==( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      bool equal = ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape );
      equal      = equal && ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape );
      equal      = equal && ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape );
      equal      = equal && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize );
      equal      = equal && ( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
      return equal;
#  endif
    }

    bool operator!=( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    Bool32 residencyStandard2DBlockShape            = {};
    Bool32 residencyStandard2DMultisampleBlockShape = {};
    Bool32 residencyStandard3DBlockShape            = {};
    Bool32 residencyAlignedMipSize                  = {};
    Bool32 residencyNonResidentStrict               = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper, so generic code can write
  // CppType<VkPhysicalDeviceSparseProperties>::Type to obtain PhysicalDeviceSparseProperties.
  template <>
  struct CppType<VkPhysicalDeviceSparseProperties>
  {
    using Type = PhysicalDeviceSparseProperties;
  };
#endif

  // wrapper struct for struct VkPhysicalDeviceProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceProperties.html
  //
  // Read-only properties struct filled in by the implementation; accordingly, no member setters
  // are generated for it. The reinterpret_cast conversions below assume this wrapper has exactly
  // the same memory layout as the C struct (same member order and types) -- an invariant
  // maintained by the code generator; do not reorder or add data members by hand.
  struct PhysicalDeviceProperties
  {
    using NativeType = VkPhysicalDeviceProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; the char/uint8_t arrays are passed as std::array and copied
    // into the ArrayWrapper1D members.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( uint32_t                                                   apiVersion_    = {},
                                                      uint32_t                                                   driverVersion_ = {},
                                                      uint32_t                                                   vendorID_      = {},
                                                      uint32_t                                                   deviceID_      = {},
                                                      PhysicalDeviceType                                         deviceType_    = PhysicalDeviceType::eOther,
                                                      std::array<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const & deviceName_    = {},
                                                      std::array<uint8_t, VK_UUID_SIZE> const &                  pipelineCacheUUID_ = {},
                                                      PhysicalDeviceLimits                                       limits_            = {},
                                                      PhysicalDeviceSparseProperties                             sparseProperties_  = {} ) VULKAN_HPP_NOEXCEPT
      : apiVersion{ apiVersion_ }
      , driverVersion{ driverVersion_ }
      , vendorID{ vendorID_ }
      , deviceID{ deviceID_ }
      , deviceType{ deviceType_ }
      , deviceName{ deviceName_ }
      , pipelineCacheUUID{ pipelineCacheUUID_ }
      , limits{ limits_ }
      , sparseProperties{ sparseProperties_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct; valid because of the layout compatibility noted above.
    PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceProperties( *reinterpret_cast<PhysicalDeviceProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceProperties & operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (available even when constructors are disabled).
    PhysicalDeviceProperties & operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceProperties const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type, by reference and by pointer,
    // so the wrapper can be handed directly to the C API.
    operator VkPhysicalDeviceProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceProperties *>( this );
    }

    operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceProperties *>( this );
    }

    operator VkPhysicalDeviceProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceProperties *>( this );
    }

    operator VkPhysicalDeviceProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, for generic comparison/hashing code.
    std::tuple<uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               PhysicalDeviceType const &,
               ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const &,
               ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &,
               PhysicalDeviceLimits const &,
               PhysicalDeviceSparseProperties const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( apiVersion, driverVersion, vendorID, deviceID, deviceType, deviceName, pipelineCacheUUID, limits, sparseProperties );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written (not defaulted) three-way comparison: deviceName is compared by content via
    // strcmp, so bytes after the NUL terminator do not affect the ordering. Returns
    // std::partial_ordering -- presumably because the nested members' <=> results are only
    // convertible to a partial ordering (PhysicalDeviceLimits contains floats -- TODO confirm).
    std::partial_ordering operator<=>( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = apiVersion <=> rhs.apiVersion; cmp != 0 )
        return cmp;
      if ( auto cmp = driverVersion <=> rhs.driverVersion; cmp != 0 )
        return cmp;
      if ( auto cmp = vendorID <=> rhs.vendorID; cmp != 0 )
        return cmp;
      if ( auto cmp = deviceID <=> rhs.deviceID; cmp != 0 )
        return cmp;
      if ( auto cmp = deviceType <=> rhs.deviceType; cmp != 0 )
        return cmp;
      if ( auto cmp = strcmp( deviceName, rhs.deviceName ); cmp != 0 )
        return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater;
      if ( auto cmp = pipelineCacheUUID <=> rhs.pipelineCacheUUID; cmp != 0 )
        return cmp;
      if ( auto cmp = limits <=> rhs.limits; cmp != 0 )
        return cmp;
      if ( auto cmp = sparseProperties <=> rhs.sparseProperties; cmp != 0 )
        return cmp;

      return std::partial_ordering::equivalent;
    }
#endif

    // Equality also compares deviceName by strcmp (string content, not raw array bytes).
    bool operator==( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( apiVersion == rhs.apiVersion ) && ( driverVersion == rhs.driverVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) &&
             ( deviceType == rhs.deviceType ) && ( strcmp( deviceName, rhs.deviceName ) == 0 ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) &&
             ( limits == rhs.limits ) && ( sparseProperties == rhs.sparseProperties );
    }

    bool operator!=( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Members mirror VkPhysicalDeviceProperties one-to-one; the C arrays are wrapped in
    // ArrayWrapper1D (same size/layout) to get container-like accessors and comparisons.
    uint32_t                                               apiVersion        = {};
    uint32_t                                               driverVersion     = {};
    uint32_t                                               vendorID          = {};
    uint32_t                                               deviceID          = {};
    PhysicalDeviceType                                     deviceType        = PhysicalDeviceType::eOther;
    ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> deviceName        = {};
    ArrayWrapper1D<uint8_t, VK_UUID_SIZE>                  pipelineCacheUUID = {};
    PhysicalDeviceLimits                                   limits            = {};
    PhysicalDeviceSparseProperties                         sparseProperties  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper, so generic code can write
  // CppType<VkPhysicalDeviceProperties>::Type to obtain PhysicalDeviceProperties.
  template <>
  struct CppType<VkPhysicalDeviceProperties>
  {
    using Type = PhysicalDeviceProperties;
  };
#endif

  // wrapper struct for struct VkPhysicalDeviceProperties2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceProperties2.html
  //
  // Chainable (sType/pNext) wrapper around PhysicalDeviceProperties. It is filled in by the
  // implementation, hence no member setters are generated. The reinterpret_cast conversions
  // below assume the wrapper is layout-compatible with the C struct (generator-maintained).
  struct PhysicalDeviceProperties2
  {
    using NativeType = VkPhysicalDeviceProperties2;

    // allowDuplicate: whether this sType may appear more than once in a single pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceProperties2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is fixed by the member initializer; only properties and pNext are settable here.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( PhysicalDeviceProperties properties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , properties{ properties_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct; valid because of the layout compatibility noted above.
    PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceProperties2( *reinterpret_cast<PhysicalDeviceProperties2 const *>( &rhs ) )
    {
    }

    PhysicalDeviceProperties2 & operator=( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (available even when constructors are disabled).
    PhysicalDeviceProperties2 & operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceProperties2 const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkPhysicalDeviceProperties2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceProperties2 *>( this );
    }

    operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceProperties2 *>( this );
    }

    operator VkPhysicalDeviceProperties2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceProperties2 *>( this );
    }

    operator VkPhysicalDeviceProperties2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceProperties2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, for generic comparison/hashing code.
    std::tuple<StructureType const &, void * const &, PhysicalDeviceProperties const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, properties );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceProperties2 const & ) const = default;
#else
    // Note: comparison includes pNext as a raw pointer value, not the chained contents.
    bool operator==( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties );
#  endif
    }

    bool operator!=( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType            sType      = StructureType::ePhysicalDeviceProperties2;
    void *                   pNext      = {};
    PhysicalDeviceProperties properties = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceProperties2>
  {
    using Type = PhysicalDeviceProperties2;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper, used to resolve a wrapper
  // type from a runtime sType value in structure-chain handling.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceProperties2>
  {
    using Type = PhysicalDeviceProperties2;
  };

  // Alias kept for source compatibility with code written against the KHR-suffixed name.
  using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2;

  // wrapper struct for struct VkPhysicalDeviceLayeredApiVulkanPropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLayeredApiVulkanPropertiesKHR.html
  //
  // Chainable (sType/pNext) properties struct whose payload is itself a full
  // PhysicalDeviceProperties2 (which is in turn chainable). Filled in by the implementation,
  // hence no member setters are generated. The reinterpret_cast conversions below assume
  // layout compatibility with the C struct (generator-maintained).
  struct PhysicalDeviceLayeredApiVulkanPropertiesKHR
  {
    using NativeType = VkPhysicalDeviceLayeredApiVulkanPropertiesKHR;

    // allowDuplicate: whether this sType may appear more than once in a single pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceLayeredApiVulkanPropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is fixed by the member initializer; only properties and pNext are settable here.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiVulkanPropertiesKHR( PhysicalDeviceProperties2 properties_ = {},
                                                                         void *                    pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , properties{ properties_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14
      PhysicalDeviceLayeredApiVulkanPropertiesKHR( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct; valid because of the layout compatibility noted above.
    PhysicalDeviceLayeredApiVulkanPropertiesKHR( VkPhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceLayeredApiVulkanPropertiesKHR( *reinterpret_cast<PhysicalDeviceLayeredApiVulkanPropertiesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceLayeredApiVulkanPropertiesKHR & operator=( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (available even when constructors are disabled).
    PhysicalDeviceLayeredApiVulkanPropertiesKHR & operator=( VkPhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceLayeredApiVulkanPropertiesKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkPhysicalDeviceLayeredApiVulkanPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceLayeredApiVulkanPropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceLayeredApiVulkanPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceLayeredApiVulkanPropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceLayeredApiVulkanPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceLayeredApiVulkanPropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceLayeredApiVulkanPropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceLayeredApiVulkanPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, for generic comparison/hashing code.
    std::tuple<StructureType const &, void * const &, PhysicalDeviceProperties2 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, properties );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & ) const = default;
#else
    // Note: comparison includes pNext as a raw pointer value, not the chained contents.
    bool operator==( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties );
#  endif
    }

    bool operator!=( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType             sType      = StructureType::ePhysicalDeviceLayeredApiVulkanPropertiesKHR;
    void *                    pNext      = {};
    PhysicalDeviceProperties2 properties = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceLayeredApiVulkanPropertiesKHR>
  {
    using Type = PhysicalDeviceLayeredApiVulkanPropertiesKHR;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper, used in structure-chain handling.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceLayeredApiVulkanPropertiesKHR>
  {
    using Type = PhysicalDeviceLayeredApiVulkanPropertiesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceLayeredDriverPropertiesMSFT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLayeredDriverPropertiesMSFT.html
  //
  // Chainable (sType/pNext) properties struct reporting the driver's underlying layered API.
  // Filled in by the implementation, hence no member setters are generated. The
  // reinterpret_cast conversions below assume layout compatibility with the C struct
  // (generator-maintained).
  struct PhysicalDeviceLayeredDriverPropertiesMSFT
  {
    using NativeType = VkPhysicalDeviceLayeredDriverPropertiesMSFT;

    // allowDuplicate: whether this sType may appear more than once in a single pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceLayeredDriverPropertiesMSFT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is fixed by the member initializer; only underlyingAPI and pNext are settable here.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceLayeredDriverPropertiesMSFT( LayeredDriverUnderlyingApiMSFT underlyingAPI_ = LayeredDriverUnderlyingApiMSFT::eNone,
                                                                    void *                         pNext_         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , underlyingAPI{ underlyingAPI_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceLayeredDriverPropertiesMSFT( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct; valid because of the layout compatibility noted above.
    PhysicalDeviceLayeredDriverPropertiesMSFT( VkPhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceLayeredDriverPropertiesMSFT( *reinterpret_cast<PhysicalDeviceLayeredDriverPropertiesMSFT const *>( &rhs ) )
    {
    }

    PhysicalDeviceLayeredDriverPropertiesMSFT & operator=( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (available even when constructors are disabled).
    PhysicalDeviceLayeredDriverPropertiesMSFT & operator=( VkPhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceLayeredDriverPropertiesMSFT const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkPhysicalDeviceLayeredDriverPropertiesMSFT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceLayeredDriverPropertiesMSFT *>( this );
    }

    operator VkPhysicalDeviceLayeredDriverPropertiesMSFT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceLayeredDriverPropertiesMSFT *>( this );
    }

    operator VkPhysicalDeviceLayeredDriverPropertiesMSFT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceLayeredDriverPropertiesMSFT *>( this );
    }

    operator VkPhysicalDeviceLayeredDriverPropertiesMSFT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceLayeredDriverPropertiesMSFT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, for generic comparison/hashing code.
    std::tuple<StructureType const &, void * const &, LayeredDriverUnderlyingApiMSFT const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, underlyingAPI );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceLayeredDriverPropertiesMSFT const & ) const = default;
#else
    // Note: comparison includes pNext as a raw pointer value, not the chained contents.
    bool operator==( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( underlyingAPI == rhs.underlyingAPI );
#  endif
    }

    bool operator!=( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                  sType         = StructureType::ePhysicalDeviceLayeredDriverPropertiesMSFT;
    void *                         pNext         = {};
    LayeredDriverUnderlyingApiMSFT underlyingAPI = LayeredDriverUnderlyingApiMSFT::eNone;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceLayeredDriverPropertiesMSFT>
  {
    using Type = PhysicalDeviceLayeredDriverPropertiesMSFT;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper, used in structure-chain handling.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceLayeredDriverPropertiesMSFT>
  {
    using Type = PhysicalDeviceLayeredDriverPropertiesMSFT;
  };

  // wrapper struct for struct VkPhysicalDeviceLegacyDitheringFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLegacyDitheringFeaturesEXT.html
  //
  // Chainable (sType/pNext) feature struct; unlike the read-only properties structs, setters are
  // generated so the application can fill it in (e.g. to enable the feature). The
  // reinterpret_cast conversions below assume layout compatibility with the C struct
  // (generator-maintained).
  struct PhysicalDeviceLegacyDitheringFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceLegacyDitheringFeaturesEXT;

    // allowDuplicate: whether this sType may appear more than once in a single pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is fixed by the member initializer; only legacyDithering and pNext are settable here.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyDitheringFeaturesEXT( Bool32 legacyDithering_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , legacyDithering{ legacyDithering_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyDitheringFeaturesEXT( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct; valid because of the layout compatibility noted above.
    PhysicalDeviceLegacyDitheringFeaturesEXT( VkPhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceLegacyDitheringFeaturesEXT( *reinterpret_cast<PhysicalDeviceLegacyDitheringFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceLegacyDitheringFeaturesEXT & operator=( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (available even when constructors are disabled).
    PhysicalDeviceLegacyDitheringFeaturesEXT & operator=( VkPhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceLegacyDitheringFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters returning *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyDitheringFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyDitheringFeaturesEXT & setLegacyDithering( Bool32 legacyDithering_ ) VULKAN_HPP_NOEXCEPT
    {
      legacyDithering = legacyDithering_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkPhysicalDeviceLegacyDitheringFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceLegacyDitheringFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceLegacyDitheringFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceLegacyDitheringFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceLegacyDitheringFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceLegacyDitheringFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceLegacyDitheringFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceLegacyDitheringFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, for generic comparison/hashing code.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, legacyDithering );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceLegacyDitheringFeaturesEXT const & ) const = default;
#else
    // Note: comparison includes pNext as a raw pointer value, not the chained contents.
    bool operator==( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( legacyDithering == rhs.legacyDithering );
#  endif
    }

    bool operator!=( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType           = StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT;
    void *        pNext           = {};
    Bool32        legacyDithering = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceLegacyDitheringFeaturesEXT>
  {
    using Type = PhysicalDeviceLegacyDitheringFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper, used in structure-chain handling.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT>
  {
    using Type = PhysicalDeviceLegacyDitheringFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT.html
  //
  // Chainable (sType/pNext) feature struct; setters are generated so the application can
  // fill it in (e.g. to enable the feature). The reinterpret_cast conversions below assume
  // layout compatibility with the C struct (generator-maintained).
  struct PhysicalDeviceLegacyVertexAttributesFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT;

    // allowDuplicate: whether this sType may appear more than once in a single pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceLegacyVertexAttributesFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is fixed by the member initializer; only legacyVertexAttributes and pNext are settable here.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyVertexAttributesFeaturesEXT( Bool32 legacyVertexAttributes_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , legacyVertexAttributes{ legacyVertexAttributes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceLegacyVertexAttributesFeaturesEXT( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct; valid because of the layout compatibility noted above.
    PhysicalDeviceLegacyVertexAttributesFeaturesEXT( VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceLegacyVertexAttributesFeaturesEXT( *reinterpret_cast<PhysicalDeviceLegacyVertexAttributesFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceLegacyVertexAttributesFeaturesEXT & operator=( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (available even when constructors are disabled).
    PhysicalDeviceLegacyVertexAttributesFeaturesEXT & operator=( VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceLegacyVertexAttributesFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters returning *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyVertexAttributesFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyVertexAttributesFeaturesEXT & setLegacyVertexAttributes( Bool32 legacyVertexAttributes_ ) VULKAN_HPP_NOEXCEPT
    {
      legacyVertexAttributes = legacyVertexAttributes_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, for generic comparison/hashing code.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, legacyVertexAttributes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & ) const = default;
#else
    // Note: comparison includes pNext as a raw pointer value, not the chained contents.
    bool operator==( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( legacyVertexAttributes == rhs.legacyVertexAttributes );
#  endif
    }

    bool operator!=( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                  = StructureType::ePhysicalDeviceLegacyVertexAttributesFeaturesEXT;
    void *        pNext                  = {};
    Bool32        legacyVertexAttributes = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT>
  {
    using Type = PhysicalDeviceLegacyVertexAttributesFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper, used in structure-chain handling.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceLegacyVertexAttributesFeaturesEXT>
  {
    using Type = PhysicalDeviceLegacyVertexAttributesFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT.html
  //
  // Chainable (sType/pNext) properties struct; filled in by the implementation, hence no
  // member setters are generated. The reinterpret_cast conversions below assume layout
  // compatibility with the C struct (generator-maintained).
  struct PhysicalDeviceLegacyVertexAttributesPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT;

    // allowDuplicate: whether this sType may appear more than once in a single pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceLegacyVertexAttributesPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType is fixed by the member initializer; only nativeUnalignedPerformance and pNext are settable here.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyVertexAttributesPropertiesEXT( Bool32 nativeUnalignedPerformance_ = {},
                                                                            void * pNext_                      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , nativeUnalignedPerformance{ nativeUnalignedPerformance_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceLegacyVertexAttributesPropertiesEXT( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct; valid because of the layout compatibility noted above.
    PhysicalDeviceLegacyVertexAttributesPropertiesEXT( VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceLegacyVertexAttributesPropertiesEXT( *reinterpret_cast<PhysicalDeviceLegacyVertexAttributesPropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceLegacyVertexAttributesPropertiesEXT &
      operator=( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (available even when constructors are disabled).
    PhysicalDeviceLegacyVertexAttributesPropertiesEXT & operator=( VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceLegacyVertexAttributesPropertiesEXT const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, for generic comparison/hashing code.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, nativeUnalignedPerformance );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & ) const = default;
#else
    // Note: comparison includes pNext as a raw pointer value, not the chained contents.
    bool operator==( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( nativeUnalignedPerformance == rhs.nativeUnalignedPerformance );
#  endif
    }

    bool operator!=( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                      = StructureType::ePhysicalDeviceLegacyVertexAttributesPropertiesEXT;
    void *        pNext                      = {};
    Bool32        nativeUnalignedPerformance = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT>
  {
    using Type = PhysicalDeviceLegacyVertexAttributesPropertiesEXT;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper, used in structure-chain handling.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceLegacyVertexAttributesPropertiesEXT>
  {
    using Type = PhysicalDeviceLegacyVertexAttributesPropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceLineRasterizationFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLineRasterizationFeatures.html
  // C++ wrapper intended to be layout-identical to the native C struct, adding constructors,
  // chainable setters, native-type conversions and comparisons.
  struct PhysicalDeviceLineRasterizationFeatures
  {
    using NativeType = VkPhysicalDeviceLineRasterizationFeatures;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceLineRasterizationFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by its in-class initializer; all feature flags default to zero.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeatures( Bool32 rectangularLines_         = {},
                                                                  Bool32 bresenhamLines_           = {},
                                                                  Bool32 smoothLines_              = {},
                                                                  Bool32 stippledRectangularLines_ = {},
                                                                  Bool32 stippledBresenhamLines_   = {},
                                                                  Bool32 stippledSmoothLines_      = {},
                                                                  void * pNext_                    = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , rectangularLines{ rectangularLines_ }
      , bresenhamLines{ bresenhamLines_ }
      , smoothLines{ smoothLines_ }
      , stippledRectangularLines{ stippledRectangularLines_ }
      , stippledBresenhamLines{ stippledBresenhamLines_ }
      , stippledSmoothLines{ stippledSmoothLines_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeatures( PhysicalDeviceLineRasterizationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on identical memory layout (reinterpret_cast).
    PhysicalDeviceLineRasterizationFeatures( VkPhysicalDeviceLineRasterizationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceLineRasterizationFeatures( *reinterpret_cast<PhysicalDeviceLineRasterizationFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceLineRasterizationFeatures & operator=( PhysicalDeviceLineRasterizationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct via the same layout-based reinterpretation.
    PhysicalDeviceLineRasterizationFeatures & operator=( VkPhysicalDeviceLineRasterizationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceLineRasterizationFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this for builder-style use.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setRectangularLines( Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT
    {
      rectangularLines = rectangularLines_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setBresenhamLines( Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT
    {
      bresenhamLines = bresenhamLines_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setSmoothLines( Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT
    {
      smoothLines = smoothLines_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setStippledRectangularLines( Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT
    {
      stippledRectangularLines = stippledRectangularLines_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setStippledBresenhamLines( Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT
    {
      stippledBresenhamLines = stippledBresenhamLines_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setStippledSmoothLines( Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT
    {
      stippledSmoothLines = stippledSmoothLines_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions reinterpreting this wrapper as the native C struct (by reference and by pointer).
    operator VkPhysicalDeviceLineRasterizationFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationFeatures *>( this );
    }

    operator VkPhysicalDeviceLineRasterizationFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceLineRasterizationFeatures *>( this );
    }

    operator VkPhysicalDeviceLineRasterizationFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceLineRasterizationFeatures *>( this );
    }

    operator VkPhysicalDeviceLineRasterizationFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceLineRasterizationFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // reflect: tuple of references to all members, used by the reflection-based operator==.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, rectangularLines, bresenhamLines, smoothLines, stippledRectangularLines, stippledBresenhamLines, stippledSmoothLines );
    }
#endif

    // Comparison: defaulted three-way operator when available, otherwise member-wise == and !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceLineRasterizationFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceLineRasterizationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rectangularLines == rhs.rectangularLines ) && ( bresenhamLines == rhs.bresenhamLines ) &&
             ( smoothLines == rhs.smoothLines ) && ( stippledRectangularLines == rhs.stippledRectangularLines ) &&
             ( stippledBresenhamLines == rhs.stippledBresenhamLines ) && ( stippledSmoothLines == rhs.stippledSmoothLines );
#  endif
    }

    bool operator!=( PhysicalDeviceLineRasterizationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Data members mirror the native struct, in the same order.
    StructureType sType                    = StructureType::ePhysicalDeviceLineRasterizationFeatures;
    void *        pNext                    = {};
    Bool32        rectangularLines         = {};
    Bool32        bresenhamLines           = {};
    Bool32        smoothLines              = {};
    Bool32        stippledRectangularLines = {};
    Bool32        stippledBresenhamLines   = {};
    Bool32        stippledSmoothLines      = {};
  };

  // Trait mappings for generic code: from the native C type (C++20 and newer) and from the
  // StructureType enumerant to the corresponding C++ wrapper struct.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceLineRasterizationFeatures>
  {
    using Type = PhysicalDeviceLineRasterizationFeatures;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceLineRasterizationFeatures>
  {
    using Type = PhysicalDeviceLineRasterizationFeatures;
  };

  // Backward-compatible aliases for the former EXT/KHR extension names of this struct.
  using PhysicalDeviceLineRasterizationFeaturesEXT = PhysicalDeviceLineRasterizationFeatures;
  using PhysicalDeviceLineRasterizationFeaturesKHR = PhysicalDeviceLineRasterizationFeatures;

  // wrapper struct for struct VkPhysicalDeviceLineRasterizationProperties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLineRasterizationProperties.html
  // C++ wrapper intended to be layout-identical to the native C struct, adding constructors,
  // native-type conversions and comparisons (no setters: this is a read-only properties struct).
  struct PhysicalDeviceLineRasterizationProperties
  {
    using NativeType = VkPhysicalDeviceLineRasterizationProperties;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceLineRasterizationProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by its in-class initializer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationProperties( uint32_t lineSubPixelPrecisionBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , lineSubPixelPrecisionBits{ lineSubPixelPrecisionBits_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationProperties( PhysicalDeviceLineRasterizationProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on identical memory layout (reinterpret_cast).
    PhysicalDeviceLineRasterizationProperties( VkPhysicalDeviceLineRasterizationProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceLineRasterizationProperties( *reinterpret_cast<PhysicalDeviceLineRasterizationProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceLineRasterizationProperties & operator=( PhysicalDeviceLineRasterizationProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct via the same layout-based reinterpretation.
    PhysicalDeviceLineRasterizationProperties & operator=( VkPhysicalDeviceLineRasterizationProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceLineRasterizationProperties const *>( &rhs );
      return *this;
    }

    // Implicit conversions reinterpreting this wrapper as the native C struct (by reference and by pointer).
    operator VkPhysicalDeviceLineRasterizationProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationProperties *>( this );
    }

    operator VkPhysicalDeviceLineRasterizationProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceLineRasterizationProperties *>( this );
    }

    operator VkPhysicalDeviceLineRasterizationProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceLineRasterizationProperties *>( this );
    }

    operator VkPhysicalDeviceLineRasterizationProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceLineRasterizationProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // reflect: tuple of references to all members, used by the reflection-based operator==.
    std::tuple<StructureType const &, void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, lineSubPixelPrecisionBits );
    }
#endif

    // Comparison: defaulted three-way operator when available, otherwise member-wise == and !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceLineRasterizationProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceLineRasterizationProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits );
#  endif
    }

    bool operator!=( PhysicalDeviceLineRasterizationProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Data members mirror the native struct, in the same order.
    StructureType sType                     = StructureType::ePhysicalDeviceLineRasterizationProperties;
    void *        pNext                     = {};
    uint32_t      lineSubPixelPrecisionBits = {};
  };

  // Trait mappings for generic code: from the native C type (C++20 and newer) and from the
  // StructureType enumerant to the corresponding C++ wrapper struct.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceLineRasterizationProperties>
  {
    using Type = PhysicalDeviceLineRasterizationProperties;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceLineRasterizationProperties>
  {
    using Type = PhysicalDeviceLineRasterizationProperties;
  };

  // Backward-compatible aliases for the former EXT/KHR extension names of this struct.
  using PhysicalDeviceLineRasterizationPropertiesEXT = PhysicalDeviceLineRasterizationProperties;
  using PhysicalDeviceLineRasterizationPropertiesKHR = PhysicalDeviceLineRasterizationProperties;

  // wrapper struct for struct VkPhysicalDeviceLinearColorAttachmentFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLinearColorAttachmentFeaturesNV.html
  // C++ wrapper intended to be layout-identical to the native C struct, adding constructors,
  // chainable setters, native-type conversions and comparisons.
  struct PhysicalDeviceLinearColorAttachmentFeaturesNV
  {
    using NativeType = VkPhysicalDeviceLinearColorAttachmentFeaturesNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by its in-class initializer; the feature flag defaults to zero.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceLinearColorAttachmentFeaturesNV( Bool32 linearColorAttachment_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , linearColorAttachment{ linearColorAttachment_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceLinearColorAttachmentFeaturesNV( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on identical memory layout (reinterpret_cast).
    PhysicalDeviceLinearColorAttachmentFeaturesNV( VkPhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceLinearColorAttachmentFeaturesNV( *reinterpret_cast<PhysicalDeviceLinearColorAttachmentFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceLinearColorAttachmentFeaturesNV & operator=( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct via the same layout-based reinterpretation.
    PhysicalDeviceLinearColorAttachmentFeaturesNV & operator=( VkPhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceLinearColorAttachmentFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this for builder-style use.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV & setLinearColorAttachment( Bool32 linearColorAttachment_ ) VULKAN_HPP_NOEXCEPT
    {
      linearColorAttachment = linearColorAttachment_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions reinterpreting this wrapper as the native C struct (by reference and by pointer).
    operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceLinearColorAttachmentFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceLinearColorAttachmentFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceLinearColorAttachmentFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceLinearColorAttachmentFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // reflect: tuple of references to all members, used by the reflection-based operator==.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, linearColorAttachment );
    }
#endif

    // Comparison: defaulted three-way operator when available, otherwise member-wise == and !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceLinearColorAttachmentFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( linearColorAttachment == rhs.linearColorAttachment );
#  endif
    }

    bool operator!=( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Data members mirror the native struct, in the same order.
    StructureType sType                 = StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV;
    void *        pNext                 = {};
    Bool32        linearColorAttachment = {};
  };

  // Trait mappings for generic code: from the native C type (C++20 and newer) and from the
  // StructureType enumerant to the corresponding C++ wrapper struct.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceLinearColorAttachmentFeaturesNV>
  {
    using Type = PhysicalDeviceLinearColorAttachmentFeaturesNV;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV>
  {
    using Type = PhysicalDeviceLinearColorAttachmentFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceMaintenance10FeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance10FeaturesKHR.html
  // C++ wrapper intended to be layout-identical to the native C struct, adding constructors,
  // chainable setters, native-type conversions and comparisons.
  struct PhysicalDeviceMaintenance10FeaturesKHR
  {
    using NativeType = VkPhysicalDeviceMaintenance10FeaturesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMaintenance10FeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by its in-class initializer; the feature flag defaults to zero.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance10FeaturesKHR( Bool32 maintenance10_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maintenance10{ maintenance10_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance10FeaturesKHR( PhysicalDeviceMaintenance10FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on identical memory layout (reinterpret_cast).
    PhysicalDeviceMaintenance10FeaturesKHR( VkPhysicalDeviceMaintenance10FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMaintenance10FeaturesKHR( *reinterpret_cast<PhysicalDeviceMaintenance10FeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceMaintenance10FeaturesKHR & operator=( PhysicalDeviceMaintenance10FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct via the same layout-based reinterpretation.
    PhysicalDeviceMaintenance10FeaturesKHR & operator=( VkPhysicalDeviceMaintenance10FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMaintenance10FeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this for builder-style use.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance10FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance10FeaturesKHR & setMaintenance10( Bool32 maintenance10_ ) VULKAN_HPP_NOEXCEPT
    {
      maintenance10 = maintenance10_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions reinterpreting this wrapper as the native C struct (by reference and by pointer).
    operator VkPhysicalDeviceMaintenance10FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMaintenance10FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceMaintenance10FeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMaintenance10FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceMaintenance10FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMaintenance10FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceMaintenance10FeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMaintenance10FeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // reflect: tuple of references to all members, used by the reflection-based operator==.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maintenance10 );
    }
#endif

    // Comparison: defaulted three-way operator when available, otherwise member-wise == and !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMaintenance10FeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceMaintenance10FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance10 == rhs.maintenance10 );
#  endif
    }

    bool operator!=( PhysicalDeviceMaintenance10FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Data members mirror the native struct, in the same order.
    StructureType sType         = StructureType::ePhysicalDeviceMaintenance10FeaturesKHR;
    void *        pNext         = {};
    Bool32        maintenance10 = {};
  };

  // Trait mappings for generic code: from the native C type (C++20 and newer) and from the
  // StructureType enumerant to the corresponding C++ wrapper struct.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceMaintenance10FeaturesKHR>
  {
    using Type = PhysicalDeviceMaintenance10FeaturesKHR;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance10FeaturesKHR>
  {
    using Type = PhysicalDeviceMaintenance10FeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceMaintenance10PropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance10PropertiesKHR.html
  // C++ wrapper intended to be layout-identical to the native C struct, adding constructors,
  // native-type conversions and comparisons (no setters: this is a read-only properties struct).
  struct PhysicalDeviceMaintenance10PropertiesKHR
  {
    using NativeType = VkPhysicalDeviceMaintenance10PropertiesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMaintenance10PropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by its in-class initializer; all members default to zero.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance10PropertiesKHR( Bool32 rgba4OpaqueBlackSwizzled_                         = {},
                                                                   Bool32 resolveSrgbFormatAppliesTransferFunction_         = {},
                                                                   Bool32 resolveSrgbFormatSupportsTransferFunctionControl_ = {},
                                                                   void * pNext_                                            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , rgba4OpaqueBlackSwizzled{ rgba4OpaqueBlackSwizzled_ }
      , resolveSrgbFormatAppliesTransferFunction{ resolveSrgbFormatAppliesTransferFunction_ }
      , resolveSrgbFormatSupportsTransferFunctionControl{ resolveSrgbFormatSupportsTransferFunctionControl_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance10PropertiesKHR( PhysicalDeviceMaintenance10PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on identical memory layout (reinterpret_cast).
    PhysicalDeviceMaintenance10PropertiesKHR( VkPhysicalDeviceMaintenance10PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMaintenance10PropertiesKHR( *reinterpret_cast<PhysicalDeviceMaintenance10PropertiesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceMaintenance10PropertiesKHR & operator=( PhysicalDeviceMaintenance10PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct via the same layout-based reinterpretation.
    PhysicalDeviceMaintenance10PropertiesKHR & operator=( VkPhysicalDeviceMaintenance10PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMaintenance10PropertiesKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions reinterpreting this wrapper as the native C struct (by reference and by pointer).
    operator VkPhysicalDeviceMaintenance10PropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMaintenance10PropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceMaintenance10PropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMaintenance10PropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceMaintenance10PropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMaintenance10PropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceMaintenance10PropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMaintenance10PropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // reflect: tuple of references to all members, used by the reflection-based operator==.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, rgba4OpaqueBlackSwizzled, resolveSrgbFormatAppliesTransferFunction, resolveSrgbFormatSupportsTransferFunctionControl );
    }
#endif

    // Comparison: defaulted three-way operator when available, otherwise member-wise == and !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMaintenance10PropertiesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceMaintenance10PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rgba4OpaqueBlackSwizzled == rhs.rgba4OpaqueBlackSwizzled ) &&
             ( resolveSrgbFormatAppliesTransferFunction == rhs.resolveSrgbFormatAppliesTransferFunction ) &&
             ( resolveSrgbFormatSupportsTransferFunctionControl == rhs.resolveSrgbFormatSupportsTransferFunctionControl );
#  endif
    }

    bool operator!=( PhysicalDeviceMaintenance10PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Data members mirror the native struct, in the same order.
    StructureType sType                                            = StructureType::ePhysicalDeviceMaintenance10PropertiesKHR;
    void *        pNext                                            = {};
    Bool32        rgba4OpaqueBlackSwizzled                         = {};
    Bool32        resolveSrgbFormatAppliesTransferFunction         = {};
    Bool32        resolveSrgbFormatSupportsTransferFunctionControl = {};
  };

  // Trait mappings for generic code: from the native C type (C++20 and newer) and from the
  // StructureType enumerant to the corresponding C++ wrapper struct.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceMaintenance10PropertiesKHR>
  {
    using Type = PhysicalDeviceMaintenance10PropertiesKHR;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance10PropertiesKHR>
  {
    using Type = PhysicalDeviceMaintenance10PropertiesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceMaintenance3Properties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance3Properties.html
  // C++ wrapper intended to be layout-identical to the native C struct, adding constructors,
  // native-type conversions and comparisons (no setters: this is a read-only properties struct).
  struct PhysicalDeviceMaintenance3Properties
  {
    using NativeType = VkPhysicalDeviceMaintenance3Properties;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMaintenance3Properties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by its in-class initializer; all members default to zero.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( uint32_t   maxPerSetDescriptors_    = {},
                                                               DeviceSize maxMemoryAllocationSize_ = {},
                                                               void *     pNext_                   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxPerSetDescriptors{ maxPerSetDescriptors_ }
      , maxMemoryAllocationSize{ maxMemoryAllocationSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on identical memory layout (reinterpret_cast).
    PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMaintenance3Properties( *reinterpret_cast<PhysicalDeviceMaintenance3Properties const *>( &rhs ) )
    {
    }

    PhysicalDeviceMaintenance3Properties & operator=( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct via the same layout-based reinterpretation.
    PhysicalDeviceMaintenance3Properties & operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMaintenance3Properties const *>( &rhs );
      return *this;
    }

    // Implicit conversions reinterpreting this wrapper as the native C struct (by reference and by pointer).
    operator VkPhysicalDeviceMaintenance3Properties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMaintenance3Properties *>( this );
    }

    operator VkPhysicalDeviceMaintenance3Properties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMaintenance3Properties *>( this );
    }

    operator VkPhysicalDeviceMaintenance3Properties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMaintenance3Properties *>( this );
    }

    operator VkPhysicalDeviceMaintenance3Properties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMaintenance3Properties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // reflect: tuple of references to all members, used by the reflection-based operator==.
    std::tuple<StructureType const &, void * const &, uint32_t const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxPerSetDescriptors, maxMemoryAllocationSize );
    }
#endif

    // Comparison: defaulted three-way operator when available, otherwise member-wise == and !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMaintenance3Properties const & ) const = default;
#else
    bool operator==( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) &&
             ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
#  endif
    }

    bool operator!=( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Data members mirror the native struct, in the same order.
    StructureType sType                   = StructureType::ePhysicalDeviceMaintenance3Properties;
    void *        pNext                   = {};
    uint32_t      maxPerSetDescriptors    = {};
    DeviceSize    maxMemoryAllocationSize = {};
  };

  // Trait mappings for generic code: from the native C type (C++20 and newer) and from the
  // StructureType enumerant to the corresponding C++ wrapper struct.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceMaintenance3Properties>
  {
    using Type = PhysicalDeviceMaintenance3Properties;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance3Properties>
  {
    using Type = PhysicalDeviceMaintenance3Properties;
  };

  // Backward-compatible alias for the former KHR extension name of this struct.
  using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties;

  // wrapper struct for struct VkPhysicalDeviceMaintenance4Features, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance4Features.html
  // C++ wrapper intended to be layout-identical to the native C struct, adding constructors,
  // chainable setters, native-type conversions and comparisons.
  struct PhysicalDeviceMaintenance4Features
  {
    using NativeType = VkPhysicalDeviceMaintenance4Features;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMaintenance4Features;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by its in-class initializer; the feature flag defaults to zero.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features( Bool32 maintenance4_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maintenance4{ maintenance4_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features( PhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on identical memory layout (reinterpret_cast).
    PhysicalDeviceMaintenance4Features( VkPhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMaintenance4Features( *reinterpret_cast<PhysicalDeviceMaintenance4Features const *>( &rhs ) )
    {
    }

    PhysicalDeviceMaintenance4Features & operator=( PhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct via the same layout-based reinterpretation.
    PhysicalDeviceMaintenance4Features & operator=( VkPhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMaintenance4Features const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this for builder-style use.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features & setMaintenance4( Bool32 maintenance4_ ) VULKAN_HPP_NOEXCEPT
    {
      maintenance4 = maintenance4_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions reinterpreting this wrapper as the native C struct (by reference and by pointer).
    operator VkPhysicalDeviceMaintenance4Features const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMaintenance4Features *>( this );
    }

    operator VkPhysicalDeviceMaintenance4Features &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMaintenance4Features *>( this );
    }

    operator VkPhysicalDeviceMaintenance4Features const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMaintenance4Features *>( this );
    }

    operator VkPhysicalDeviceMaintenance4Features *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMaintenance4Features *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // reflect: tuple of references to all members, used by the reflection-based operator==.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maintenance4 );
    }
#endif

    // Comparison: defaulted three-way operator when available, otherwise member-wise == and !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMaintenance4Features const & ) const = default;
#else
    bool operator==( PhysicalDeviceMaintenance4Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance4 == rhs.maintenance4 );
#  endif
    }

    bool operator!=( PhysicalDeviceMaintenance4Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Data members mirror the native struct, in the same order.
    StructureType sType        = StructureType::ePhysicalDeviceMaintenance4Features;
    void *        pNext        = {};
    Bool32        maintenance4 = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper type (available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceMaintenance4Features>
  {
    using Type = PhysicalDeviceMaintenance4Features;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type, e.g. for structure-chain lookups.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance4Features>
  {
    using Type = PhysicalDeviceMaintenance4Features;
  };

  // Alias retaining the extension-suffixed (KHR) name for this struct.
  using PhysicalDeviceMaintenance4FeaturesKHR = PhysicalDeviceMaintenance4Features;

  // wrapper struct for struct VkPhysicalDeviceMaintenance4Properties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance4Properties.html
  struct PhysicalDeviceMaintenance4Properties
  {
    using NativeType = VkPhysicalDeviceMaintenance4Properties;

    // At most one instance of this struct may appear in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMaintenance4Properties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer below and never set here.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties( DeviceSize maxBufferSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxBufferSize{ maxBufferSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties( PhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because this wrapper is layout-compatible with VkPhysicalDeviceMaintenance4Properties.
    PhysicalDeviceMaintenance4Properties( VkPhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMaintenance4Properties( *reinterpret_cast<PhysicalDeviceMaintenance4Properties const *>( &rhs ) )
    {
    }

    PhysicalDeviceMaintenance4Properties & operator=( PhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpret_cast as the converting constructor).
    PhysicalDeviceMaintenance4Properties & operator=( VkPhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMaintenance4Properties const *>( &rhs );
      return *this;
    }

    // Note: unlike the Features structs, no field setters are generated here — only the
    // value constructor and native-struct assignment populate this struct.

    // Implicit conversions to the native C type (reference and pointer forms), so the
    // wrapper can be passed directly to the C API.
    operator VkPhysicalDeviceMaintenance4Properties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMaintenance4Properties *>( this );
    }

    operator VkPhysicalDeviceMaintenance4Properties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMaintenance4Properties *>( this );
    }

    operator VkPhysicalDeviceMaintenance4Properties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMaintenance4Properties *>( this );
    }

    operator VkPhysicalDeviceMaintenance4Properties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMaintenance4Properties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used by the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxBufferSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMaintenance4Properties const & ) const = default;
#else
    // Memberwise equality when the defaulted three-way comparison is not available.
    bool operator==( PhysicalDeviceMaintenance4Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxBufferSize == rhs.maxBufferSize );
#  endif
    }

    bool operator!=( PhysicalDeviceMaintenance4Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPhysicalDeviceMaintenance4Properties exactly.
    StructureType sType         = StructureType::ePhysicalDeviceMaintenance4Properties;
    void *        pNext         = {};
    DeviceSize    maxBufferSize = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper type (available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceMaintenance4Properties>
  {
    using Type = PhysicalDeviceMaintenance4Properties;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type, e.g. for structure-chain lookups.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance4Properties>
  {
    using Type = PhysicalDeviceMaintenance4Properties;
  };

  // Alias retaining the extension-suffixed (KHR) name for this struct.
  using PhysicalDeviceMaintenance4PropertiesKHR = PhysicalDeviceMaintenance4Properties;

  // wrapper struct for struct VkPhysicalDeviceMaintenance5Features, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance5Features.html
  struct PhysicalDeviceMaintenance5Features
  {
    using NativeType = VkPhysicalDeviceMaintenance5Features;

    // At most one instance of this struct may appear in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMaintenance5Features;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer below and never set here.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5Features( Bool32 maintenance5_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maintenance5{ maintenance5_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5Features( PhysicalDeviceMaintenance5Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because this wrapper is layout-compatible with VkPhysicalDeviceMaintenance5Features.
    PhysicalDeviceMaintenance5Features( VkPhysicalDeviceMaintenance5Features const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMaintenance5Features( *reinterpret_cast<PhysicalDeviceMaintenance5Features const *>( &rhs ) )
    {
    }

    PhysicalDeviceMaintenance5Features & operator=( PhysicalDeviceMaintenance5Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpret_cast as the converting constructor).
    PhysicalDeviceMaintenance5Features & operator=( VkPhysicalDeviceMaintenance5Features const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMaintenance5Features const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance5Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance5Features & setMaintenance5( Bool32 maintenance5_ ) VULKAN_HPP_NOEXCEPT
    {
      maintenance5 = maintenance5_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer forms), so the
    // wrapper can be passed directly to the C API.
    operator VkPhysicalDeviceMaintenance5Features const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMaintenance5Features *>( this );
    }

    operator VkPhysicalDeviceMaintenance5Features &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMaintenance5Features *>( this );
    }

    operator VkPhysicalDeviceMaintenance5Features const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMaintenance5Features *>( this );
    }

    operator VkPhysicalDeviceMaintenance5Features *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMaintenance5Features *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used by the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maintenance5 );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMaintenance5Features const & ) const = default;
#else
    // Memberwise equality when the defaulted three-way comparison is not available.
    bool operator==( PhysicalDeviceMaintenance5Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance5 == rhs.maintenance5 );
#  endif
    }

    bool operator!=( PhysicalDeviceMaintenance5Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPhysicalDeviceMaintenance5Features exactly.
    StructureType sType        = StructureType::ePhysicalDeviceMaintenance5Features;
    void *        pNext        = {};
    Bool32        maintenance5 = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper type (available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceMaintenance5Features>
  {
    using Type = PhysicalDeviceMaintenance5Features;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type, e.g. for structure-chain lookups.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance5Features>
  {
    using Type = PhysicalDeviceMaintenance5Features;
  };

  // Alias retaining the extension-suffixed (KHR) name for this struct.
  using PhysicalDeviceMaintenance5FeaturesKHR = PhysicalDeviceMaintenance5Features;

  // wrapper struct for struct VkPhysicalDeviceMaintenance5Properties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance5Properties.html
  struct PhysicalDeviceMaintenance5Properties
  {
    using NativeType = VkPhysicalDeviceMaintenance5Properties;

    // At most one instance of this struct may appear in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMaintenance5Properties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer below and never set here.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5Properties( Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting_ = {},
                                                               Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting_     = {},
                                                               Bool32 depthStencilSwizzleOneSupport_                       = {},
                                                               Bool32 polygonModePointSize_                                = {},
                                                               Bool32 nonStrictSinglePixelWideLinesUseParallelogram_       = {},
                                                               Bool32 nonStrictWideLinesUseParallelogram_                  = {},
                                                               void * pNext_                                               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , earlyFragmentMultisampleCoverageAfterSampleCounting{ earlyFragmentMultisampleCoverageAfterSampleCounting_ }
      , earlyFragmentSampleMaskTestBeforeSampleCounting{ earlyFragmentSampleMaskTestBeforeSampleCounting_ }
      , depthStencilSwizzleOneSupport{ depthStencilSwizzleOneSupport_ }
      , polygonModePointSize{ polygonModePointSize_ }
      , nonStrictSinglePixelWideLinesUseParallelogram{ nonStrictSinglePixelWideLinesUseParallelogram_ }
      , nonStrictWideLinesUseParallelogram{ nonStrictWideLinesUseParallelogram_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5Properties( PhysicalDeviceMaintenance5Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because this wrapper is layout-compatible with VkPhysicalDeviceMaintenance5Properties.
    PhysicalDeviceMaintenance5Properties( VkPhysicalDeviceMaintenance5Properties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMaintenance5Properties( *reinterpret_cast<PhysicalDeviceMaintenance5Properties const *>( &rhs ) )
    {
    }

    PhysicalDeviceMaintenance5Properties & operator=( PhysicalDeviceMaintenance5Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpret_cast as the converting constructor).
    PhysicalDeviceMaintenance5Properties & operator=( VkPhysicalDeviceMaintenance5Properties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMaintenance5Properties const *>( &rhs );
      return *this;
    }

    // Note: unlike the Features structs, no field setters are generated here — only the
    // value constructor and native-struct assignment populate this struct.

    // Implicit conversions to the native C type (reference and pointer forms), so the
    // wrapper can be passed directly to the C API.
    operator VkPhysicalDeviceMaintenance5Properties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMaintenance5Properties *>( this );
    }

    operator VkPhysicalDeviceMaintenance5Properties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMaintenance5Properties *>( this );
    }

    operator VkPhysicalDeviceMaintenance5Properties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMaintenance5Properties *>( this );
    }

    operator VkPhysicalDeviceMaintenance5Properties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMaintenance5Properties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used by the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       earlyFragmentMultisampleCoverageAfterSampleCounting,
                       earlyFragmentSampleMaskTestBeforeSampleCounting,
                       depthStencilSwizzleOneSupport,
                       polygonModePointSize,
                       nonStrictSinglePixelWideLinesUseParallelogram,
                       nonStrictWideLinesUseParallelogram );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMaintenance5Properties const & ) const = default;
#else
    // Memberwise equality when the defaulted three-way comparison is not available.
    bool operator==( PhysicalDeviceMaintenance5Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( earlyFragmentMultisampleCoverageAfterSampleCounting == rhs.earlyFragmentMultisampleCoverageAfterSampleCounting ) &&
             ( earlyFragmentSampleMaskTestBeforeSampleCounting == rhs.earlyFragmentSampleMaskTestBeforeSampleCounting ) &&
             ( depthStencilSwizzleOneSupport == rhs.depthStencilSwizzleOneSupport ) && ( polygonModePointSize == rhs.polygonModePointSize ) &&
             ( nonStrictSinglePixelWideLinesUseParallelogram == rhs.nonStrictSinglePixelWideLinesUseParallelogram ) &&
             ( nonStrictWideLinesUseParallelogram == rhs.nonStrictWideLinesUseParallelogram );
#  endif
    }

    bool operator!=( PhysicalDeviceMaintenance5Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPhysicalDeviceMaintenance5Properties exactly.
    StructureType sType                                               = StructureType::ePhysicalDeviceMaintenance5Properties;
    void *        pNext                                               = {};
    Bool32        earlyFragmentMultisampleCoverageAfterSampleCounting = {};
    Bool32        earlyFragmentSampleMaskTestBeforeSampleCounting     = {};
    Bool32        depthStencilSwizzleOneSupport                       = {};
    Bool32        polygonModePointSize                                = {};
    Bool32        nonStrictSinglePixelWideLinesUseParallelogram       = {};
    Bool32        nonStrictWideLinesUseParallelogram                  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper type (available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceMaintenance5Properties>
  {
    using Type = PhysicalDeviceMaintenance5Properties;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type, e.g. for structure-chain lookups.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance5Properties>
  {
    using Type = PhysicalDeviceMaintenance5Properties;
  };

  // Alias retaining the extension-suffixed (KHR) name for this struct.
  using PhysicalDeviceMaintenance5PropertiesKHR = PhysicalDeviceMaintenance5Properties;

  // wrapper struct for struct VkPhysicalDeviceMaintenance6Features, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance6Features.html
  struct PhysicalDeviceMaintenance6Features
  {
    using NativeType = VkPhysicalDeviceMaintenance6Features;

    // At most one instance of this struct may appear in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMaintenance6Features;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer below and never set here.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6Features( Bool32 maintenance6_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maintenance6{ maintenance6_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6Features( PhysicalDeviceMaintenance6Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because this wrapper is layout-compatible with VkPhysicalDeviceMaintenance6Features.
    PhysicalDeviceMaintenance6Features( VkPhysicalDeviceMaintenance6Features const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMaintenance6Features( *reinterpret_cast<PhysicalDeviceMaintenance6Features const *>( &rhs ) )
    {
    }

    PhysicalDeviceMaintenance6Features & operator=( PhysicalDeviceMaintenance6Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpret_cast as the converting constructor).
    PhysicalDeviceMaintenance6Features & operator=( VkPhysicalDeviceMaintenance6Features const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMaintenance6Features const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance6Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance6Features & setMaintenance6( Bool32 maintenance6_ ) VULKAN_HPP_NOEXCEPT
    {
      maintenance6 = maintenance6_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer forms), so the
    // wrapper can be passed directly to the C API.
    operator VkPhysicalDeviceMaintenance6Features const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMaintenance6Features *>( this );
    }

    operator VkPhysicalDeviceMaintenance6Features &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMaintenance6Features *>( this );
    }

    operator VkPhysicalDeviceMaintenance6Features const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMaintenance6Features *>( this );
    }

    operator VkPhysicalDeviceMaintenance6Features *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMaintenance6Features *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used by the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maintenance6 );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMaintenance6Features const & ) const = default;
#else
    // Memberwise equality when the defaulted three-way comparison is not available.
    bool operator==( PhysicalDeviceMaintenance6Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance6 == rhs.maintenance6 );
#  endif
    }

    bool operator!=( PhysicalDeviceMaintenance6Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPhysicalDeviceMaintenance6Features exactly.
    StructureType sType        = StructureType::ePhysicalDeviceMaintenance6Features;
    void *        pNext        = {};
    Bool32        maintenance6 = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper type (available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceMaintenance6Features>
  {
    using Type = PhysicalDeviceMaintenance6Features;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type, e.g. for structure-chain lookups.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance6Features>
  {
    using Type = PhysicalDeviceMaintenance6Features;
  };

  // Alias retaining the extension-suffixed (KHR) name for this struct.
  using PhysicalDeviceMaintenance6FeaturesKHR = PhysicalDeviceMaintenance6Features;

  // wrapper struct for struct VkPhysicalDeviceMaintenance6Properties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance6Properties.html
  struct PhysicalDeviceMaintenance6Properties
  {
    using NativeType = VkPhysicalDeviceMaintenance6Properties;

    // At most one instance of this struct may appear in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMaintenance6Properties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer below and never set here.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6Properties( Bool32   blockTexelViewCompatibleMultipleLayers_ = {},
                                                               uint32_t maxCombinedImageSamplerDescriptorCount_ = {},
                                                               Bool32   fragmentShadingRateClampCombinerInputs_ = {},
                                                               void *   pNext_                                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , blockTexelViewCompatibleMultipleLayers{ blockTexelViewCompatibleMultipleLayers_ }
      , maxCombinedImageSamplerDescriptorCount{ maxCombinedImageSamplerDescriptorCount_ }
      , fragmentShadingRateClampCombinerInputs{ fragmentShadingRateClampCombinerInputs_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6Properties( PhysicalDeviceMaintenance6Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because this wrapper is layout-compatible with VkPhysicalDeviceMaintenance6Properties.
    PhysicalDeviceMaintenance6Properties( VkPhysicalDeviceMaintenance6Properties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMaintenance6Properties( *reinterpret_cast<PhysicalDeviceMaintenance6Properties const *>( &rhs ) )
    {
    }

    PhysicalDeviceMaintenance6Properties & operator=( PhysicalDeviceMaintenance6Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpret_cast as the converting constructor).
    PhysicalDeviceMaintenance6Properties & operator=( VkPhysicalDeviceMaintenance6Properties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMaintenance6Properties const *>( &rhs );
      return *this;
    }

    // Note: unlike the Features structs, no field setters are generated here — only the
    // value constructor and native-struct assignment populate this struct.

    // Implicit conversions to the native C type (reference and pointer forms), so the
    // wrapper can be passed directly to the C API.
    operator VkPhysicalDeviceMaintenance6Properties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMaintenance6Properties *>( this );
    }

    operator VkPhysicalDeviceMaintenance6Properties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMaintenance6Properties *>( this );
    }

    operator VkPhysicalDeviceMaintenance6Properties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMaintenance6Properties *>( this );
    }

    operator VkPhysicalDeviceMaintenance6Properties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMaintenance6Properties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used by the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &, uint32_t const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, blockTexelViewCompatibleMultipleLayers, maxCombinedImageSamplerDescriptorCount, fragmentShadingRateClampCombinerInputs );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMaintenance6Properties const & ) const = default;
#else
    // Memberwise equality when the defaulted three-way comparison is not available.
    bool operator==( PhysicalDeviceMaintenance6Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( blockTexelViewCompatibleMultipleLayers == rhs.blockTexelViewCompatibleMultipleLayers ) &&
             ( maxCombinedImageSamplerDescriptorCount == rhs.maxCombinedImageSamplerDescriptorCount ) &&
             ( fragmentShadingRateClampCombinerInputs == rhs.fragmentShadingRateClampCombinerInputs );
#  endif
    }

    bool operator!=( PhysicalDeviceMaintenance6Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPhysicalDeviceMaintenance6Properties exactly.
    StructureType sType                                  = StructureType::ePhysicalDeviceMaintenance6Properties;
    void *        pNext                                  = {};
    Bool32        blockTexelViewCompatibleMultipleLayers = {};
    uint32_t      maxCombinedImageSamplerDescriptorCount = {};
    Bool32        fragmentShadingRateClampCombinerInputs = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper type (available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceMaintenance6Properties>
  {
    using Type = PhysicalDeviceMaintenance6Properties;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type, e.g. for structure-chain lookups.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance6Properties>
  {
    using Type = PhysicalDeviceMaintenance6Properties;
  };

  // Alias retaining the extension-suffixed (KHR) name for this struct.
  using PhysicalDeviceMaintenance6PropertiesKHR = PhysicalDeviceMaintenance6Properties;

  // wrapper struct for struct VkPhysicalDeviceMaintenance7FeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance7FeaturesKHR.html
  struct PhysicalDeviceMaintenance7FeaturesKHR
  {
    using NativeType = VkPhysicalDeviceMaintenance7FeaturesKHR;

    // At most one instance of this struct may appear in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMaintenance7FeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer below and never set here.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance7FeaturesKHR( Bool32 maintenance7_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maintenance7{ maintenance7_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance7FeaturesKHR( PhysicalDeviceMaintenance7FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because this wrapper is layout-compatible with VkPhysicalDeviceMaintenance7FeaturesKHR.
    PhysicalDeviceMaintenance7FeaturesKHR( VkPhysicalDeviceMaintenance7FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMaintenance7FeaturesKHR( *reinterpret_cast<PhysicalDeviceMaintenance7FeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceMaintenance7FeaturesKHR & operator=( PhysicalDeviceMaintenance7FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpret_cast as the converting constructor).
    PhysicalDeviceMaintenance7FeaturesKHR & operator=( VkPhysicalDeviceMaintenance7FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMaintenance7FeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance7FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance7FeaturesKHR & setMaintenance7( Bool32 maintenance7_ ) VULKAN_HPP_NOEXCEPT
    {
      maintenance7 = maintenance7_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer forms), so the
    // wrapper can be passed directly to the C API.
    operator VkPhysicalDeviceMaintenance7FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMaintenance7FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceMaintenance7FeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMaintenance7FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceMaintenance7FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMaintenance7FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceMaintenance7FeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMaintenance7FeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used by the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maintenance7 );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMaintenance7FeaturesKHR const & ) const = default;
#else
    // Memberwise equality when the defaulted three-way comparison is not available.
    bool operator==( PhysicalDeviceMaintenance7FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance7 == rhs.maintenance7 );
#  endif
    }

    bool operator!=( PhysicalDeviceMaintenance7FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPhysicalDeviceMaintenance7FeaturesKHR exactly.
    StructureType sType        = StructureType::ePhysicalDeviceMaintenance7FeaturesKHR;
    void *        pNext        = {};
    Bool32        maintenance7 = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper type (available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceMaintenance7FeaturesKHR>
  {
    using Type = PhysicalDeviceMaintenance7FeaturesKHR;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type, e.g. for structure-chain lookups.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance7FeaturesKHR>
  {
    using Type = PhysicalDeviceMaintenance7FeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceMaintenance7PropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance7PropertiesKHR.html
  struct PhysicalDeviceMaintenance7PropertiesKHR
  {
    using NativeType = VkPhysicalDeviceMaintenance7PropertiesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMaintenance7PropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance7PropertiesKHR( Bool32   robustFragmentShadingRateAttachmentAccess_                 = {},
                                                                  Bool32   separateDepthStencilAttachmentAccess_                      = {},
                                                                  uint32_t maxDescriptorSetTotalUniformBuffersDynamic_                = {},
                                                                  uint32_t maxDescriptorSetTotalStorageBuffersDynamic_                = {},
                                                                  uint32_t maxDescriptorSetTotalBuffersDynamic_                       = {},
                                                                  uint32_t maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic_ = {},
                                                                  uint32_t maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic_ = {},
                                                                  uint32_t maxDescriptorSetUpdateAfterBindTotalBuffersDynamic_        = {},
                                                                  void *   pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , robustFragmentShadingRateAttachmentAccess{ robustFragmentShadingRateAttachmentAccess_ }
      , separateDepthStencilAttachmentAccess{ separateDepthStencilAttachmentAccess_ }
      , maxDescriptorSetTotalUniformBuffersDynamic{ maxDescriptorSetTotalUniformBuffersDynamic_ }
      , maxDescriptorSetTotalStorageBuffersDynamic{ maxDescriptorSetTotalStorageBuffersDynamic_ }
      , maxDescriptorSetTotalBuffersDynamic{ maxDescriptorSetTotalBuffersDynamic_ }
      , maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic{ maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic_ }
      , maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic{ maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic_ }
      , maxDescriptorSetUpdateAfterBindTotalBuffersDynamic{ maxDescriptorSetUpdateAfterBindTotalBuffersDynamic_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance7PropertiesKHR( PhysicalDeviceMaintenance7PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMaintenance7PropertiesKHR( VkPhysicalDeviceMaintenance7PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMaintenance7PropertiesKHR( *reinterpret_cast<PhysicalDeviceMaintenance7PropertiesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceMaintenance7PropertiesKHR & operator=( PhysicalDeviceMaintenance7PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceMaintenance7PropertiesKHR & operator=( VkPhysicalDeviceMaintenance7PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMaintenance7PropertiesKHR const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceMaintenance7PropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMaintenance7PropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceMaintenance7PropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMaintenance7PropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceMaintenance7PropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMaintenance7PropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceMaintenance7PropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMaintenance7PropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &,
               void * const &,
               Bool32 const &,
               Bool32 const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       robustFragmentShadingRateAttachmentAccess,
                       separateDepthStencilAttachmentAccess,
                       maxDescriptorSetTotalUniformBuffersDynamic,
                       maxDescriptorSetTotalStorageBuffersDynamic,
                       maxDescriptorSetTotalBuffersDynamic,
                       maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic,
                       maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic,
                       maxDescriptorSetUpdateAfterBindTotalBuffersDynamic );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMaintenance7PropertiesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceMaintenance7PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( robustFragmentShadingRateAttachmentAccess == rhs.robustFragmentShadingRateAttachmentAccess ) &&
             ( separateDepthStencilAttachmentAccess == rhs.separateDepthStencilAttachmentAccess ) &&
             ( maxDescriptorSetTotalUniformBuffersDynamic == rhs.maxDescriptorSetTotalUniformBuffersDynamic ) &&
             ( maxDescriptorSetTotalStorageBuffersDynamic == rhs.maxDescriptorSetTotalStorageBuffersDynamic ) &&
             ( maxDescriptorSetTotalBuffersDynamic == rhs.maxDescriptorSetTotalBuffersDynamic ) &&
             ( maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic ) &&
             ( maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic ) &&
             ( maxDescriptorSetUpdateAfterBindTotalBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindTotalBuffersDynamic );
#  endif
    }

    bool operator!=( PhysicalDeviceMaintenance7PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                                                     = StructureType::ePhysicalDeviceMaintenance7PropertiesKHR;
    void *        pNext                                                     = {};
    Bool32        robustFragmentShadingRateAttachmentAccess                 = {};
    Bool32        separateDepthStencilAttachmentAccess                      = {};
    uint32_t      maxDescriptorSetTotalUniformBuffersDynamic                = {};
    uint32_t      maxDescriptorSetTotalStorageBuffersDynamic                = {};
    uint32_t      maxDescriptorSetTotalBuffersDynamic                       = {};
    uint32_t      maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic = {};
    uint32_t      maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic = {};
    uint32_t      maxDescriptorSetUpdateAfterBindTotalBuffersDynamic        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Specialization mapping the C struct VkPhysicalDeviceMaintenance7PropertiesKHR to its C++ wrapper type (C++20 builds only).
  template <>
  struct CppType<VkPhysicalDeviceMaintenance7PropertiesKHR>
  {
    using Type = PhysicalDeviceMaintenance7PropertiesKHR;
  };
#endif

  // Specialization mapping the StructureType enumerant to the wrapper type it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance7PropertiesKHR>
  {
    using Type = PhysicalDeviceMaintenance7PropertiesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceMaintenance8FeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance8FeaturesKHR.html
  struct PhysicalDeviceMaintenance8FeaturesKHR
  {
    using NativeType = VkPhysicalDeviceMaintenance8FeaturesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMaintenance8FeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; every field defaults to a zero / null value.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance8FeaturesKHR( Bool32 maintenance8_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , maintenance8( maintenance8_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance8FeaturesKHR( PhysicalDeviceMaintenance8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible C struct.
    PhysicalDeviceMaintenance8FeaturesKHR( VkPhysicalDeviceMaintenance8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceMaintenance8FeaturesKHR & operator=( PhysicalDeviceMaintenance8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible C struct via a reinterpreting copy.
    PhysicalDeviceMaintenance8FeaturesKHR & operator=( VkPhysicalDeviceMaintenance8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMaintenance8FeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setter for pNext.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance8FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    // Chainable setter for the maintenance8 feature flag.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance8FeaturesKHR & setMaintenance8( Bool32 maintenance8_ ) VULKAN_HPP_NOEXCEPT
    {
      this->maintenance8 = maintenance8_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to the equivalent C struct (reference and pointer, const and non-const).
    operator VkPhysicalDeviceMaintenance8FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMaintenance8FeaturesKHR const *>( this );
    }

    operator VkPhysicalDeviceMaintenance8FeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMaintenance8FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceMaintenance8FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMaintenance8FeaturesKHR const *>( this );
    }

    operator VkPhysicalDeviceMaintenance8FeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMaintenance8FeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maintenance8 );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMaintenance8FeaturesKHR const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( PhysicalDeviceMaintenance8FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return sType == rhs.sType && pNext == rhs.pNext && maintenance8 == rhs.maintenance8;
#  endif
    }

    bool operator!=( PhysicalDeviceMaintenance8FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType        = StructureType::ePhysicalDeviceMaintenance8FeaturesKHR;
    void *        pNext        = {};
    Bool32        maintenance8 = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Specialization mapping the C struct VkPhysicalDeviceMaintenance8FeaturesKHR to its C++ wrapper type (C++20 builds only).
  template <>
  struct CppType<VkPhysicalDeviceMaintenance8FeaturesKHR>
  {
    using Type = PhysicalDeviceMaintenance8FeaturesKHR;
  };
#endif

  // Specialization mapping the StructureType enumerant to the wrapper type it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance8FeaturesKHR>
  {
    using Type = PhysicalDeviceMaintenance8FeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceMaintenance9FeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance9FeaturesKHR.html
  struct PhysicalDeviceMaintenance9FeaturesKHR
  {
    using NativeType = VkPhysicalDeviceMaintenance9FeaturesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMaintenance9FeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; every field defaults to a zero / null value.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance9FeaturesKHR( Bool32 maintenance9_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , maintenance9( maintenance9_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance9FeaturesKHR( PhysicalDeviceMaintenance9FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible C struct.
    PhysicalDeviceMaintenance9FeaturesKHR( VkPhysicalDeviceMaintenance9FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceMaintenance9FeaturesKHR & operator=( PhysicalDeviceMaintenance9FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible C struct via a reinterpreting copy.
    PhysicalDeviceMaintenance9FeaturesKHR & operator=( VkPhysicalDeviceMaintenance9FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMaintenance9FeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setter for pNext.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance9FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    // Chainable setter for the maintenance9 feature flag.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance9FeaturesKHR & setMaintenance9( Bool32 maintenance9_ ) VULKAN_HPP_NOEXCEPT
    {
      this->maintenance9 = maintenance9_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to the equivalent C struct (reference and pointer, const and non-const).
    operator VkPhysicalDeviceMaintenance9FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMaintenance9FeaturesKHR const *>( this );
    }

    operator VkPhysicalDeviceMaintenance9FeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMaintenance9FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceMaintenance9FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMaintenance9FeaturesKHR const *>( this );
    }

    operator VkPhysicalDeviceMaintenance9FeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMaintenance9FeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maintenance9 );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMaintenance9FeaturesKHR const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( PhysicalDeviceMaintenance9FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return sType == rhs.sType && pNext == rhs.pNext && maintenance9 == rhs.maintenance9;
#  endif
    }

    bool operator!=( PhysicalDeviceMaintenance9FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType        = StructureType::ePhysicalDeviceMaintenance9FeaturesKHR;
    void *        pNext        = {};
    Bool32        maintenance9 = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Specialization mapping the C struct VkPhysicalDeviceMaintenance9FeaturesKHR to its C++ wrapper type (C++20 builds only).
  template <>
  struct CppType<VkPhysicalDeviceMaintenance9FeaturesKHR>
  {
    using Type = PhysicalDeviceMaintenance9FeaturesKHR;
  };
#endif

  // Specialization mapping the StructureType enumerant to the wrapper type it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance9FeaturesKHR>
  {
    using Type = PhysicalDeviceMaintenance9FeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceMaintenance9PropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance9PropertiesKHR.html
  struct PhysicalDeviceMaintenance9PropertiesKHR
  {
    using NativeType = VkPhysicalDeviceMaintenance9PropertiesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMaintenance9PropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; fields default to zero / null, the attribute value to eZeroZeroZeroZero.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance9PropertiesKHR(
      Bool32                         image2DViewOf3DSparse_       = {},
      DefaultVertexAttributeValueKHR defaultVertexAttributeValue_ = DefaultVertexAttributeValueKHR::eZeroZeroZeroZero,
      void *                         pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , image2DViewOf3DSparse( image2DViewOf3DSparse_ )
      , defaultVertexAttributeValue( defaultVertexAttributeValue_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance9PropertiesKHR( PhysicalDeviceMaintenance9PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible C struct.
    PhysicalDeviceMaintenance9PropertiesKHR( VkPhysicalDeviceMaintenance9PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceMaintenance9PropertiesKHR & operator=( PhysicalDeviceMaintenance9PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible C struct via a reinterpreting copy.
    PhysicalDeviceMaintenance9PropertiesKHR & operator=( VkPhysicalDeviceMaintenance9PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMaintenance9PropertiesKHR const *>( &rhs );
      return *this;
    }

    // Reinterpreting conversions to the equivalent C struct (reference and pointer, const and non-const).
    operator VkPhysicalDeviceMaintenance9PropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMaintenance9PropertiesKHR const *>( this );
    }

    operator VkPhysicalDeviceMaintenance9PropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMaintenance9PropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceMaintenance9PropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMaintenance9PropertiesKHR const *>( this );
    }

    operator VkPhysicalDeviceMaintenance9PropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMaintenance9PropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &, DefaultVertexAttributeValueKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, image2DViewOf3DSparse, defaultVertexAttributeValue );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMaintenance9PropertiesKHR const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( PhysicalDeviceMaintenance9PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return sType == rhs.sType && pNext == rhs.pNext && image2DViewOf3DSparse == rhs.image2DViewOf3DSparse &&
             defaultVertexAttributeValue == rhs.defaultVertexAttributeValue;
#  endif
    }

    bool operator!=( PhysicalDeviceMaintenance9PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType                  sType                       = StructureType::ePhysicalDeviceMaintenance9PropertiesKHR;
    void *                         pNext                       = {};
    Bool32                         image2DViewOf3DSparse       = {};
    DefaultVertexAttributeValueKHR defaultVertexAttributeValue = DefaultVertexAttributeValueKHR::eZeroZeroZeroZero;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Specialization mapping the C struct VkPhysicalDeviceMaintenance9PropertiesKHR to its C++ wrapper type (C++20 builds only).
  template <>
  struct CppType<VkPhysicalDeviceMaintenance9PropertiesKHR>
  {
    using Type = PhysicalDeviceMaintenance9PropertiesKHR;
  };
#endif

  // Specialization mapping the StructureType enumerant to the wrapper type it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance9PropertiesKHR>
  {
    using Type = PhysicalDeviceMaintenance9PropertiesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceMapMemoryPlacedFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMapMemoryPlacedFeaturesEXT.html
  struct PhysicalDeviceMapMemoryPlacedFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceMapMemoryPlacedFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMapMemoryPlacedFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; every field defaults to a zero / null value.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMapMemoryPlacedFeaturesEXT( Bool32 memoryMapPlaced_      = {},
                                                                   Bool32 memoryMapRangePlaced_ = {},
                                                                   Bool32 memoryUnmapReserve_   = {},
                                                                   void * pNext_                = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , memoryMapPlaced( memoryMapPlaced_ )
      , memoryMapRangePlaced( memoryMapRangePlaced_ )
      , memoryUnmapReserve( memoryUnmapReserve_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMapMemoryPlacedFeaturesEXT( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible C struct.
    PhysicalDeviceMapMemoryPlacedFeaturesEXT( VkPhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceMapMemoryPlacedFeaturesEXT & operator=( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible C struct via a reinterpreting copy.
    PhysicalDeviceMapMemoryPlacedFeaturesEXT & operator=( VkPhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMapMemoryPlacedFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setter for pNext.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    // Chainable setter for the memoryMapPlaced feature flag.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT & setMemoryMapPlaced( Bool32 memoryMapPlaced_ ) VULKAN_HPP_NOEXCEPT
    {
      this->memoryMapPlaced = memoryMapPlaced_;
      return *this;
    }

    // Chainable setter for the memoryMapRangePlaced feature flag.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT & setMemoryMapRangePlaced( Bool32 memoryMapRangePlaced_ ) VULKAN_HPP_NOEXCEPT
    {
      this->memoryMapRangePlaced = memoryMapRangePlaced_;
      return *this;
    }

    // Chainable setter for the memoryUnmapReserve feature flag.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT & setMemoryUnmapReserve( Bool32 memoryUnmapReserve_ ) VULKAN_HPP_NOEXCEPT
    {
      this->memoryUnmapReserve = memoryUnmapReserve_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to the equivalent C struct (reference and pointer, const and non-const).
    operator VkPhysicalDeviceMapMemoryPlacedFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMapMemoryPlacedFeaturesEXT const *>( this );
    }

    operator VkPhysicalDeviceMapMemoryPlacedFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMapMemoryPlacedFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceMapMemoryPlacedFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMapMemoryPlacedFeaturesEXT const *>( this );
    }

    operator VkPhysicalDeviceMapMemoryPlacedFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMapMemoryPlacedFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memoryMapPlaced, memoryMapRangePlaced, memoryUnmapReserve );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return sType == rhs.sType && pNext == rhs.pNext && memoryMapPlaced == rhs.memoryMapPlaced && memoryMapRangePlaced == rhs.memoryMapRangePlaced &&
             memoryUnmapReserve == rhs.memoryUnmapReserve;
#  endif
    }

    bool operator!=( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType                = StructureType::ePhysicalDeviceMapMemoryPlacedFeaturesEXT;
    void *        pNext                = {};
    Bool32        memoryMapPlaced      = {};
    Bool32        memoryMapRangePlaced = {};
    Bool32        memoryUnmapReserve   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Specialization mapping the C struct VkPhysicalDeviceMapMemoryPlacedFeaturesEXT to its C++ wrapper type (C++20 builds only).
  template <>
  struct CppType<VkPhysicalDeviceMapMemoryPlacedFeaturesEXT>
  {
    using Type = PhysicalDeviceMapMemoryPlacedFeaturesEXT;
  };
#endif

  // Specialization mapping the StructureType enumerant to the wrapper type it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMapMemoryPlacedFeaturesEXT>
  {
    using Type = PhysicalDeviceMapMemoryPlacedFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceMapMemoryPlacedPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMapMemoryPlacedPropertiesEXT.html
  struct PhysicalDeviceMapMemoryPlacedPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceMapMemoryPlacedPropertiesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMapMemoryPlacedPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; every field defaults to a zero / null value.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMapMemoryPlacedPropertiesEXT( DeviceSize minPlacedMemoryMapAlignment_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , minPlacedMemoryMapAlignment( minPlacedMemoryMapAlignment_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMapMemoryPlacedPropertiesEXT( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible C struct.
    PhysicalDeviceMapMemoryPlacedPropertiesEXT( VkPhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceMapMemoryPlacedPropertiesEXT & operator=( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible C struct via a reinterpreting copy.
    PhysicalDeviceMapMemoryPlacedPropertiesEXT & operator=( VkPhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMapMemoryPlacedPropertiesEXT const *>( &rhs );
      return *this;
    }

    // Reinterpreting conversions to the equivalent C struct (reference and pointer, const and non-const).
    operator VkPhysicalDeviceMapMemoryPlacedPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMapMemoryPlacedPropertiesEXT const *>( this );
    }

    operator VkPhysicalDeviceMapMemoryPlacedPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMapMemoryPlacedPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceMapMemoryPlacedPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMapMemoryPlacedPropertiesEXT const *>( this );
    }

    operator VkPhysicalDeviceMapMemoryPlacedPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMapMemoryPlacedPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &, void * const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, minPlacedMemoryMapAlignment );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return sType == rhs.sType && pNext == rhs.pNext && minPlacedMemoryMapAlignment == rhs.minPlacedMemoryMapAlignment;
#  endif
    }

    bool operator!=( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType                       = StructureType::ePhysicalDeviceMapMemoryPlacedPropertiesEXT;
    void *        pNext                       = {};
    DeviceSize    minPlacedMemoryMapAlignment = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Specialization mapping the C struct VkPhysicalDeviceMapMemoryPlacedPropertiesEXT to its C++ wrapper type (C++20 builds only).
  template <>
  struct CppType<VkPhysicalDeviceMapMemoryPlacedPropertiesEXT>
  {
    using Type = PhysicalDeviceMapMemoryPlacedPropertiesEXT;
  };
#endif

  // Specialization mapping the StructureType enumerant to the wrapper type it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMapMemoryPlacedPropertiesEXT>
  {
    using Type = PhysicalDeviceMapMemoryPlacedPropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceMemoryBudgetPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMemoryBudgetPropertiesEXT.html
  struct PhysicalDeviceMemoryBudgetPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceMemoryBudgetPropertiesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; the per-heap arrays default to all zeros.
    // Constexpr-14 only, since the array wrapper copy is not a C++11 constant expression.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( std::array<DeviceSize, VK_MAX_MEMORY_HEAPS> const & heapBudget_ = {},
                                                                     std::array<DeviceSize, VK_MAX_MEMORY_HEAPS> const & heapUsage_  = {},
                                                                     void *                                              pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , heapBudget( heapBudget_ )
      , heapUsage( heapUsage_ )
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible C struct.
    PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = rhs;
    }

    PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible C struct via a reinterpreting copy.
    PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMemoryBudgetPropertiesEXT const *>( &rhs );
      return *this;
    }

    // Reinterpreting conversions to the equivalent C struct (reference and pointer, const and non-const).
    operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT const *>( this );
    }

    operator VkPhysicalDeviceMemoryBudgetPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT const *>( this );
    }

    operator VkPhysicalDeviceMemoryBudgetPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &,
               void * const &,
               ArrayWrapper1D<DeviceSize, VK_MAX_MEMORY_HEAPS> const &,
               ArrayWrapper1D<DeviceSize, VK_MAX_MEMORY_HEAPS> const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, heapBudget, heapUsage );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMemoryBudgetPropertiesEXT const & ) const = default;
#else
    // Member-wise equality, including element-wise comparison of the heap arrays.
    bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return sType == rhs.sType && pNext == rhs.pNext && heapBudget == rhs.heapBudget && heapUsage == rhs.heapUsage;
#  endif
    }

    bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType                                   sType      = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
    void *                                          pNext      = {};
    ArrayWrapper1D<DeviceSize, VK_MAX_MEMORY_HEAPS> heapBudget = {};
    ArrayWrapper1D<DeviceSize, VK_MAX_MEMORY_HEAPS> heapUsage  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Specialization mapping the C struct VkPhysicalDeviceMemoryBudgetPropertiesEXT to its C++ wrapper type (C++20 builds only).
  template <>
  struct CppType<VkPhysicalDeviceMemoryBudgetPropertiesEXT>
  {
    using Type = PhysicalDeviceMemoryBudgetPropertiesEXT;
  };
#endif

  // Specialization mapping the StructureType enumerant to the wrapper type it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT>
  {
    using Type = PhysicalDeviceMemoryBudgetPropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceMemoryDecompressionFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMemoryDecompressionFeaturesEXT.html
  struct PhysicalDeviceMemoryDecompressionFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceMemoryDecompressionFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMemoryDecompressionFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and never passed in.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionFeaturesEXT( Bool32 memoryDecompression_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , memoryDecompression( memoryDecompression_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionFeaturesEXT( PhysicalDeviceMemoryDecompressionFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the wrapper is layout-compatible, so a cast-and-copy suffices.
    PhysicalDeviceMemoryDecompressionFeaturesEXT( VkPhysicalDeviceMemoryDecompressionFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMemoryDecompressionFeaturesEXT( *reinterpret_cast<PhysicalDeviceMemoryDecompressionFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceMemoryDecompressionFeaturesEXT & operator=( PhysicalDeviceMemoryDecompressionFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible).
    PhysicalDeviceMemoryDecompressionFeaturesEXT & operator=( VkPhysicalDeviceMemoryDecompressionFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMemoryDecompressionFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryDecompressionFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryDecompressionFeaturesEXT & setMemoryDecompression( Bool32 memoryDecompression_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryDecompression = memoryDecompression_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reference and pointer conversions to the native C struct.
    operator VkPhysicalDeviceMemoryDecompressionFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMemoryDecompressionFeaturesEXT const *>( this );
    }

    operator VkPhysicalDeviceMemoryDecompressionFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMemoryDecompressionFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceMemoryDecompressionFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMemoryDecompressionFeaturesEXT const *>( this );
    }

    operator VkPhysicalDeviceMemoryDecompressionFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMemoryDecompressionFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Member-wise view of the struct as a tuple of references.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memoryDecompression );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMemoryDecompressionFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceMemoryDecompressionFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( memoryDecompression == rhs.memoryDecompression ) && ( pNext == rhs.pNext ) && ( sType == rhs.sType );
#  endif
    }

    bool operator!=( PhysicalDeviceMemoryDecompressionFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType               = StructureType::ePhysicalDeviceMemoryDecompressionFeaturesEXT;
    void *        pNext               = {};
    Bool32        memoryDecompression = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type.
  template <>
  struct CppType<VkPhysicalDeviceMemoryDecompressionFeaturesEXT>
  {
    using Type = PhysicalDeviceMemoryDecompressionFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct that carries that sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryDecompressionFeaturesEXT>
  {
    using Type = PhysicalDeviceMemoryDecompressionFeaturesEXT;
  };

  // Alias for the NV-suffixed name (extension was promoted from NV to EXT).
  using PhysicalDeviceMemoryDecompressionFeaturesNV = PhysicalDeviceMemoryDecompressionFeaturesEXT;

  // wrapper struct for struct VkPhysicalDeviceMemoryDecompressionPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMemoryDecompressionPropertiesEXT.html
  // Note: this is a properties (output) struct; unlike the feature structs, no setters are generated for it.
  struct PhysicalDeviceMemoryDecompressionPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceMemoryDecompressionPropertiesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMemoryDecompressionPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionPropertiesEXT( MemoryDecompressionMethodFlagsEXT decompressionMethods_          = {},
                                                                         uint64_t                          maxDecompressionIndirectCount_ = {},
                                                                         void *                            pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , decompressionMethods{ decompressionMethods_ }
      , maxDecompressionIndirectCount{ maxDecompressionIndirectCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceMemoryDecompressionPropertiesEXT( PhysicalDeviceMemoryDecompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with it.
    PhysicalDeviceMemoryDecompressionPropertiesEXT( VkPhysicalDeviceMemoryDecompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMemoryDecompressionPropertiesEXT( *reinterpret_cast<PhysicalDeviceMemoryDecompressionPropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceMemoryDecompressionPropertiesEXT & operator=( PhysicalDeviceMemoryDecompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible).
    PhysicalDeviceMemoryDecompressionPropertiesEXT & operator=( VkPhysicalDeviceMemoryDecompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMemoryDecompressionPropertiesEXT const *>( &rhs );
      return *this;
    }

    // Implicit reference / pointer conversions to the native C struct.
    operator VkPhysicalDeviceMemoryDecompressionPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMemoryDecompressionPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceMemoryDecompressionPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMemoryDecompressionPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceMemoryDecompressionPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMemoryDecompressionPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceMemoryDecompressionPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMemoryDecompressionPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Member-wise view of the struct as a tuple of references (drives the reflection-based comparison below).
    std::tuple<StructureType const &, void * const &, MemoryDecompressionMethodFlagsEXT const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, decompressionMethods, maxDecompressionIndirectCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMemoryDecompressionPropertiesEXT const & ) const = default;
#else
    // Member-wise equality (includes pNext as a raw pointer comparison, not a deep chain comparison).
    bool operator==( PhysicalDeviceMemoryDecompressionPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decompressionMethods == rhs.decompressionMethods ) &&
             ( maxDecompressionIndirectCount == rhs.maxDecompressionIndirectCount );
#  endif
    }

    bool operator!=( PhysicalDeviceMemoryDecompressionPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                     sType                         = StructureType::ePhysicalDeviceMemoryDecompressionPropertiesEXT;
    void *                            pNext                         = {};
    MemoryDecompressionMethodFlagsEXT decompressionMethods          = {};
    uint64_t                          maxDecompressionIndirectCount = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type.
  template <>
  struct CppType<VkPhysicalDeviceMemoryDecompressionPropertiesEXT>
  {
    using Type = PhysicalDeviceMemoryDecompressionPropertiesEXT;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct that carries that sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryDecompressionPropertiesEXT>
  {
    using Type = PhysicalDeviceMemoryDecompressionPropertiesEXT;
  };

  // Alias for the NV-suffixed name (extension was promoted from NV to EXT).
  using PhysicalDeviceMemoryDecompressionPropertiesNV = PhysicalDeviceMemoryDecompressionPropertiesEXT;

  // wrapper struct for struct VkPhysicalDeviceMemoryPriorityFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMemoryPriorityFeaturesEXT.html
  struct PhysicalDeviceMemoryPriorityFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceMemoryPriorityFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and never passed in.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( Bool32 memoryPriority_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , memoryPriority( memoryPriority_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the wrapper is layout-compatible, so a cast-and-copy suffices.
    PhysicalDeviceMemoryPriorityFeaturesEXT( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMemoryPriorityFeaturesEXT( *reinterpret_cast<PhysicalDeviceMemoryPriorityFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible).
    PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMemoryPriorityFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT & setMemoryPriority( Bool32 memoryPriority_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryPriority = memoryPriority_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reference and pointer conversions to the native C struct.
    operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMemoryPriorityFeaturesEXT const *>( this );
    }

    operator VkPhysicalDeviceMemoryPriorityFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMemoryPriorityFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMemoryPriorityFeaturesEXT const *>( this );
    }

    operator VkPhysicalDeviceMemoryPriorityFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMemoryPriorityFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Member-wise view of the struct as a tuple of references.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memoryPriority );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMemoryPriorityFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( memoryPriority == rhs.memoryPriority ) && ( pNext == rhs.pNext ) && ( sType == rhs.sType );
#  endif
    }

    bool operator!=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType          = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;
    void *        pNext          = {};
    Bool32        memoryPriority = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type.
  template <>
  struct CppType<VkPhysicalDeviceMemoryPriorityFeaturesEXT>
  {
    using Type = PhysicalDeviceMemoryPriorityFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct that carries that sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT>
  {
    using Type = PhysicalDeviceMemoryPriorityFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceMemoryProperties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMemoryProperties.html
  // Note: a core Vulkan 1.0 struct without sType/pNext; the fixed-capacity arrays hold
  // VK_MAX_MEMORY_TYPES / VK_MAX_MEMORY_HEAPS slots, of which only the first
  // memoryTypeCount / memoryHeapCount entries are meaningful.
  struct PhysicalDeviceMemoryProperties
  {
    using NativeType = VkPhysicalDeviceMemoryProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( uint32_t                                            memoryTypeCount_ = {},
                                                            std::array<MemoryType, VK_MAX_MEMORY_TYPES> const & memoryTypes_     = {},
                                                            uint32_t                                            memoryHeapCount_ = {},
                                                            std::array<MemoryHeap, VK_MAX_MEMORY_HEAPS> const & memoryHeaps_     = {} ) VULKAN_HPP_NOEXCEPT
      : memoryTypeCount{ memoryTypeCount_ }
      , memoryTypes{ memoryTypes_ }
      , memoryHeapCount{ memoryHeapCount_ }
      , memoryHeaps{ memoryHeaps_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with it.
    PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMemoryProperties( *reinterpret_cast<PhysicalDeviceMemoryProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceMemoryProperties & operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible).
    PhysicalDeviceMemoryProperties & operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMemoryProperties const *>( &rhs );
      return *this;
    }

    // Implicit reference / pointer conversions to the native C struct.
    operator VkPhysicalDeviceMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties *>( this );
    }

    operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( this );
    }

    operator VkPhysicalDeviceMemoryProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMemoryProperties *>( this );
    }

    operator VkPhysicalDeviceMemoryProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Member-wise view of the struct as a tuple of references.
    std::tuple<uint32_t const &,
               ArrayWrapper1D<MemoryType, VK_MAX_MEMORY_TYPES> const &,
               uint32_t const &,
               ArrayWrapper1D<MemoryHeap, VK_MAX_MEMORY_HEAPS> const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( memoryTypeCount, memoryTypes, memoryHeapCount, memoryHeaps );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written ordering: compares the counts, then only the first memoryTypeCount /
    // memoryHeapCount array entries; trailing (unused) slots are deliberately ignored.
    std::strong_ordering operator<=>( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = memoryTypeCount <=> rhs.memoryTypeCount; cmp != 0 )
        return cmp;
      for ( size_t i = 0; i < memoryTypeCount; ++i )
      {
        if ( auto cmp = memoryTypes[i] <=> rhs.memoryTypes[i]; cmp != 0 )
          return cmp;
      }
      if ( auto cmp = memoryHeapCount <=> rhs.memoryHeapCount; cmp != 0 )
        return cmp;
      for ( size_t i = 0; i < memoryHeapCount; ++i )
      {
        if ( auto cmp = memoryHeaps[i] <=> rhs.memoryHeaps[i]; cmp != 0 )
          return cmp;
      }

      return std::strong_ordering::equivalent;
    }
#endif

    // Defined unconditionally (a user-provided <=> does not synthesize operator==).
    // Equality likewise only inspects the first 'count' entries, via a byte-wise memcmp
    // (the ArrayWrapper1D members decay to raw pointers here); the count equality checked
    // first guarantees both memcmp calls read the same number of elements from each side.
    bool operator==( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( memoryTypeCount == rhs.memoryTypeCount ) && ( memcmp( memoryTypes, rhs.memoryTypes, memoryTypeCount * sizeof( MemoryType ) ) == 0 ) &&
             ( memoryHeapCount == rhs.memoryHeapCount ) && ( memcmp( memoryHeaps, rhs.memoryHeaps, memoryHeapCount * sizeof( MemoryHeap ) ) == 0 );
    }

    bool operator!=( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    uint32_t                                        memoryTypeCount = {};
    ArrayWrapper1D<MemoryType, VK_MAX_MEMORY_TYPES> memoryTypes     = {};
    uint32_t                                        memoryHeapCount = {};
    ArrayWrapper1D<MemoryHeap, VK_MAX_MEMORY_HEAPS> memoryHeaps     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (no StructureType mapping: this struct has no sType).
  template <>
  struct CppType<VkPhysicalDeviceMemoryProperties>
  {
    using Type = PhysicalDeviceMemoryProperties;
  };
#endif

  // wrapper struct for struct VkPhysicalDeviceMemoryProperties2, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMemoryProperties2.html
  // Note: an extensible (sType/pNext) wrapper around PhysicalDeviceMemoryProperties; it is an
  // output struct, so no setters are generated for it.
  struct PhysicalDeviceMemoryProperties2
  {
    using NativeType = VkPhysicalDeviceMemoryProperties2;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMemoryProperties2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( PhysicalDeviceMemoryProperties memoryProperties_ = {},
                                                             void *                         pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , memoryProperties{ memoryProperties_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with it.
    PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMemoryProperties2( *reinterpret_cast<PhysicalDeviceMemoryProperties2 const *>( &rhs ) )
    {
    }

    PhysicalDeviceMemoryProperties2 & operator=( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible).
    PhysicalDeviceMemoryProperties2 & operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMemoryProperties2 const *>( &rhs );
      return *this;
    }

    // Implicit reference / pointer conversions to the native C struct.
    operator VkPhysicalDeviceMemoryProperties2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties2 *>( this );
    }

    operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( this );
    }

    operator VkPhysicalDeviceMemoryProperties2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMemoryProperties2 *>( this );
    }

    operator VkPhysicalDeviceMemoryProperties2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Member-wise view of the struct as a tuple of references.
    std::tuple<StructureType const &, void * const &, PhysicalDeviceMemoryProperties const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memoryProperties );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMemoryProperties2 const & ) const = default;
#else
    bool operator==( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryProperties == rhs.memoryProperties );
#  endif
    }

    bool operator!=( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                  sType            = StructureType::ePhysicalDeviceMemoryProperties2;
    void *                         pNext            = {};
    PhysicalDeviceMemoryProperties memoryProperties = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type.
  template <>
  struct CppType<VkPhysicalDeviceMemoryProperties2>
  {
    using Type = PhysicalDeviceMemoryProperties2;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct that carries that sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryProperties2>
  {
    using Type = PhysicalDeviceMemoryProperties2;
  };

  // Alias for the KHR-suffixed name (struct was promoted from KHR to core).
  using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2;

  // wrapper struct for struct VkPhysicalDeviceMeshShaderFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMeshShaderFeaturesEXT.html
  struct PhysicalDeviceMeshShaderFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceMeshShaderFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMeshShaderFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and never passed in.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesEXT( Bool32 taskShader_                             = {},
                                                              Bool32 meshShader_                             = {},
                                                              Bool32 multiviewMeshShader_                    = {},
                                                              Bool32 primitiveFragmentShadingRateMeshShader_ = {},
                                                              Bool32 meshShaderQueries_                      = {},
                                                              void * pNext_                                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , taskShader{ taskShader_ }
      , meshShader{ meshShader_ }
      , multiviewMeshShader{ multiviewMeshShader_ }
      , primitiveFragmentShadingRateMeshShader{ primitiveFragmentShadingRateMeshShader_ }
      , meshShaderQueries{ meshShaderQueries_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesEXT( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with it.
    PhysicalDeviceMeshShaderFeaturesEXT( VkPhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMeshShaderFeaturesEXT( *reinterpret_cast<PhysicalDeviceMeshShaderFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceMeshShaderFeaturesEXT & operator=( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible).
    PhysicalDeviceMeshShaderFeaturesEXT & operator=( VkPhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMeshShaderFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setTaskShader( Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT
    {
      taskShader = taskShader_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setMeshShader( Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT
    {
      meshShader = meshShader_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setMultiviewMeshShader( Bool32 multiviewMeshShader_ ) VULKAN_HPP_NOEXCEPT
    {
      multiviewMeshShader = multiviewMeshShader_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT &
      setPrimitiveFragmentShadingRateMeshShader( Bool32 primitiveFragmentShadingRateMeshShader_ ) VULKAN_HPP_NOEXCEPT
    {
      primitiveFragmentShadingRateMeshShader = primitiveFragmentShadingRateMeshShader_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setMeshShaderQueries( Bool32 meshShaderQueries_ ) VULKAN_HPP_NOEXCEPT
    {
      meshShaderQueries = meshShaderQueries_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference / pointer conversions to the native C struct.
    operator VkPhysicalDeviceMeshShaderFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMeshShaderFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceMeshShaderFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceMeshShaderFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMeshShaderFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceMeshShaderFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Member-wise view of the struct as a tuple of references.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, taskShader, meshShader, multiviewMeshShader, primitiveFragmentShadingRateMeshShader, meshShaderQueries );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMeshShaderFeaturesEXT const & ) const = default;
#else
    // Member-wise equality (pNext is compared as a raw pointer, not as a deep chain comparison).
    bool operator==( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( taskShader == rhs.taskShader ) && ( meshShader == rhs.meshShader ) &&
             ( multiviewMeshShader == rhs.multiviewMeshShader ) && ( primitiveFragmentShadingRateMeshShader == rhs.primitiveFragmentShadingRateMeshShader ) &&
             ( meshShaderQueries == rhs.meshShaderQueries );
#  endif
    }

    bool operator!=( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                                  = StructureType::ePhysicalDeviceMeshShaderFeaturesEXT;
    void *        pNext                                  = {};
    Bool32        taskShader                             = {};
    Bool32        meshShader                             = {};
    Bool32        multiviewMeshShader                    = {};
    Bool32        primitiveFragmentShadingRateMeshShader = {};
    Bool32        meshShaderQueries                      = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type.
  template <>
  struct CppType<VkPhysicalDeviceMeshShaderFeaturesEXT>
  {
    using Type = PhysicalDeviceMeshShaderFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct that carries that sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderFeaturesEXT>
  {
    using Type = PhysicalDeviceMeshShaderFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceMeshShaderFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMeshShaderFeaturesNV.html
  struct PhysicalDeviceMeshShaderFeaturesNV
  {
    using NativeType = VkPhysicalDeviceMeshShaderFeaturesNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and never passed in.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( Bool32 taskShader_ = {}, Bool32 meshShader_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , taskShader( taskShader_ )
      , meshShader( meshShader_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the wrapper is layout-compatible, so a cast-and-copy suffices.
    PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMeshShaderFeaturesNV( *reinterpret_cast<PhysicalDeviceMeshShaderFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceMeshShaderFeaturesNV & operator=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible).
    PhysicalDeviceMeshShaderFeaturesNV & operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMeshShaderFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setTaskShader( Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT
    {
      taskShader = taskShader_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setMeshShader( Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT
    {
      meshShader = meshShader_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reference and pointer conversions to the native C struct.
    operator VkPhysicalDeviceMeshShaderFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV const *>( this );
    }

    operator VkPhysicalDeviceMeshShaderFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceMeshShaderFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV const *>( this );
    }

    operator VkPhysicalDeviceMeshShaderFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Member-wise view of the struct as a tuple of references.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, taskShader, meshShader );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMeshShaderFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( taskShader == rhs.taskShader ) && ( meshShader == rhs.meshShader ) && ( pNext == rhs.pNext ) && ( sType == rhs.sType );
#  endif
    }

    bool operator!=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType      = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
    void *        pNext      = {};
    Bool32        taskShader = {};
    Bool32        meshShader = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper.
  template <>
  struct CppType<VkPhysicalDeviceMeshShaderFeaturesNV>
  {
    using Type = PhysicalDeviceMeshShaderFeaturesNV;
  };
#endif

  // Trait mapping the StructureType enumerant to its C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderFeaturesNV>
  {
    using Type = PhysicalDeviceMeshShaderFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceMeshShaderPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMeshShaderPropertiesEXT.html
  struct PhysicalDeviceMeshShaderPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceMeshShaderPropertiesEXT;

    // Chain metadata: allowDuplicate == false, and structureType identifies this struct's sType.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMeshShaderPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor: every limit defaults to zero-initialized and pNext
    // (the last parameter) defaults to nullptr.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesEXT( uint32_t                        maxTaskWorkGroupTotalCount_            = {},
                                                                   std::array<uint32_t, 3> const & maxTaskWorkGroupCount_                 = {},
                                                                   uint32_t                        maxTaskWorkGroupInvocations_           = {},
                                                                   std::array<uint32_t, 3> const & maxTaskWorkGroupSize_                  = {},
                                                                   uint32_t                        maxTaskPayloadSize_                    = {},
                                                                   uint32_t                        maxTaskSharedMemorySize_               = {},
                                                                   uint32_t                        maxTaskPayloadAndSharedMemorySize_     = {},
                                                                   uint32_t                        maxMeshWorkGroupTotalCount_            = {},
                                                                   std::array<uint32_t, 3> const & maxMeshWorkGroupCount_                 = {},
                                                                   uint32_t                        maxMeshWorkGroupInvocations_           = {},
                                                                   std::array<uint32_t, 3> const & maxMeshWorkGroupSize_                  = {},
                                                                   uint32_t                        maxMeshSharedMemorySize_               = {},
                                                                   uint32_t                        maxMeshPayloadAndSharedMemorySize_     = {},
                                                                   uint32_t                        maxMeshOutputMemorySize_               = {},
                                                                   uint32_t                        maxMeshPayloadAndOutputMemorySize_     = {},
                                                                   uint32_t                        maxMeshOutputComponents_               = {},
                                                                   uint32_t                        maxMeshOutputVertices_                 = {},
                                                                   uint32_t                        maxMeshOutputPrimitives_               = {},
                                                                   uint32_t                        maxMeshOutputLayers_                   = {},
                                                                   uint32_t                        maxMeshMultiviewViewCount_             = {},
                                                                   uint32_t                        meshOutputPerVertexGranularity_        = {},
                                                                   uint32_t                        meshOutputPerPrimitiveGranularity_     = {},
                                                                   uint32_t                        maxPreferredTaskWorkGroupInvocations_  = {},
                                                                   uint32_t                        maxPreferredMeshWorkGroupInvocations_  = {},
                                                                   Bool32                          prefersLocalInvocationVertexOutput_    = {},
                                                                   Bool32                          prefersLocalInvocationPrimitiveOutput_ = {},
                                                                   Bool32                          prefersCompactVertexOutput_            = {},
                                                                   Bool32                          prefersCompactPrimitiveOutput_         = {},
                                                                   void *                          pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxTaskWorkGroupTotalCount{ maxTaskWorkGroupTotalCount_ }
      , maxTaskWorkGroupCount{ maxTaskWorkGroupCount_ }
      , maxTaskWorkGroupInvocations{ maxTaskWorkGroupInvocations_ }
      , maxTaskWorkGroupSize{ maxTaskWorkGroupSize_ }
      , maxTaskPayloadSize{ maxTaskPayloadSize_ }
      , maxTaskSharedMemorySize{ maxTaskSharedMemorySize_ }
      , maxTaskPayloadAndSharedMemorySize{ maxTaskPayloadAndSharedMemorySize_ }
      , maxMeshWorkGroupTotalCount{ maxMeshWorkGroupTotalCount_ }
      , maxMeshWorkGroupCount{ maxMeshWorkGroupCount_ }
      , maxMeshWorkGroupInvocations{ maxMeshWorkGroupInvocations_ }
      , maxMeshWorkGroupSize{ maxMeshWorkGroupSize_ }
      , maxMeshSharedMemorySize{ maxMeshSharedMemorySize_ }
      , maxMeshPayloadAndSharedMemorySize{ maxMeshPayloadAndSharedMemorySize_ }
      , maxMeshOutputMemorySize{ maxMeshOutputMemorySize_ }
      , maxMeshPayloadAndOutputMemorySize{ maxMeshPayloadAndOutputMemorySize_ }
      , maxMeshOutputComponents{ maxMeshOutputComponents_ }
      , maxMeshOutputVertices{ maxMeshOutputVertices_ }
      , maxMeshOutputPrimitives{ maxMeshOutputPrimitives_ }
      , maxMeshOutputLayers{ maxMeshOutputLayers_ }
      , maxMeshMultiviewViewCount{ maxMeshMultiviewViewCount_ }
      , meshOutputPerVertexGranularity{ meshOutputPerVertexGranularity_ }
      , meshOutputPerPrimitiveGranularity{ meshOutputPerPrimitiveGranularity_ }
      , maxPreferredTaskWorkGroupInvocations{ maxPreferredTaskWorkGroupInvocations_ }
      , maxPreferredMeshWorkGroupInvocations{ maxPreferredMeshWorkGroupInvocations_ }
      , prefersLocalInvocationVertexOutput{ prefersLocalInvocationVertexOutput_ }
      , prefersLocalInvocationPrimitiveOutput{ prefersLocalInvocationPrimitiveOutput_ }
      , prefersCompactVertexOutput{ prefersCompactVertexOutput_ }
      , prefersCompactPrimitiveOutput{ prefersCompactPrimitiveOutput_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesEXT( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct via the layout-compatible wrapper view.
    PhysicalDeviceMeshShaderPropertiesEXT( VkPhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMeshShaderPropertiesEXT( *reinterpret_cast<PhysicalDeviceMeshShaderPropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceMeshShaderPropertiesEXT & operator=( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct. This is a properties (output) struct, so no
    // fluent setters are generated for it.
    PhysicalDeviceMeshShaderPropertiesEXT & operator=( VkPhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMeshShaderPropertiesEXT const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native Vulkan type, by reference and by pointer.
    operator VkPhysicalDeviceMeshShaderPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMeshShaderPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceMeshShaderPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceMeshShaderPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMeshShaderPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceMeshShaderPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, in declaration order (used by the
    // reflection-based equality comparison below).
    std::tuple<StructureType const &,
               void * const &,
               uint32_t const &,
               ArrayWrapper1D<uint32_t, 3> const &,
               uint32_t const &,
               ArrayWrapper1D<uint32_t, 3> const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               ArrayWrapper1D<uint32_t, 3> const &,
               uint32_t const &,
               ArrayWrapper1D<uint32_t, 3> const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       maxTaskWorkGroupTotalCount,
                       maxTaskWorkGroupCount,
                       maxTaskWorkGroupInvocations,
                       maxTaskWorkGroupSize,
                       maxTaskPayloadSize,
                       maxTaskSharedMemorySize,
                       maxTaskPayloadAndSharedMemorySize,
                       maxMeshWorkGroupTotalCount,
                       maxMeshWorkGroupCount,
                       maxMeshWorkGroupInvocations,
                       maxMeshWorkGroupSize,
                       maxMeshSharedMemorySize,
                       maxMeshPayloadAndSharedMemorySize,
                       maxMeshOutputMemorySize,
                       maxMeshPayloadAndOutputMemorySize,
                       maxMeshOutputComponents,
                       maxMeshOutputVertices,
                       maxMeshOutputPrimitives,
                       maxMeshOutputLayers,
                       maxMeshMultiviewViewCount,
                       meshOutputPerVertexGranularity,
                       meshOutputPerPrimitiveGranularity,
                       maxPreferredTaskWorkGroupInvocations,
                       maxPreferredMeshWorkGroupInvocations,
                       prefersLocalInvocationVertexOutput,
                       prefersLocalInvocationPrimitiveOutput,
                       prefersCompactVertexOutput,
                       prefersCompactPrimitiveOutput );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // With C++20 three-way comparison available, all comparisons are defaulted.
    auto operator<=>( PhysicalDeviceMeshShaderPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      // Member-wise equality over every field, including sType and the raw pNext pointer.
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxTaskWorkGroupTotalCount == rhs.maxTaskWorkGroupTotalCount ) &&
             ( maxTaskWorkGroupCount == rhs.maxTaskWorkGroupCount ) && ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations ) &&
             ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize ) && ( maxTaskPayloadSize == rhs.maxTaskPayloadSize ) &&
             ( maxTaskSharedMemorySize == rhs.maxTaskSharedMemorySize ) && ( maxTaskPayloadAndSharedMemorySize == rhs.maxTaskPayloadAndSharedMemorySize ) &&
             ( maxMeshWorkGroupTotalCount == rhs.maxMeshWorkGroupTotalCount ) && ( maxMeshWorkGroupCount == rhs.maxMeshWorkGroupCount ) &&
             ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations ) && ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize ) &&
             ( maxMeshSharedMemorySize == rhs.maxMeshSharedMemorySize ) && ( maxMeshPayloadAndSharedMemorySize == rhs.maxMeshPayloadAndSharedMemorySize ) &&
             ( maxMeshOutputMemorySize == rhs.maxMeshOutputMemorySize ) && ( maxMeshPayloadAndOutputMemorySize == rhs.maxMeshPayloadAndOutputMemorySize ) &&
             ( maxMeshOutputComponents == rhs.maxMeshOutputComponents ) && ( maxMeshOutputVertices == rhs.maxMeshOutputVertices ) &&
             ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives ) && ( maxMeshOutputLayers == rhs.maxMeshOutputLayers ) &&
             ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount ) && ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity ) &&
             ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity ) &&
             ( maxPreferredTaskWorkGroupInvocations == rhs.maxPreferredTaskWorkGroupInvocations ) &&
             ( maxPreferredMeshWorkGroupInvocations == rhs.maxPreferredMeshWorkGroupInvocations ) &&
             ( prefersLocalInvocationVertexOutput == rhs.prefersLocalInvocationVertexOutput ) &&
             ( prefersLocalInvocationPrimitiveOutput == rhs.prefersLocalInvocationPrimitiveOutput ) &&
             ( prefersCompactVertexOutput == rhs.prefersCompactVertexOutput ) && ( prefersCompactPrimitiveOutput == rhs.prefersCompactPrimitiveOutput );
#  endif
    }

    bool operator!=( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceMeshShaderPropertiesEXT; the reinterpret_cast-based
    // conversions above rely on this exact layout and order.
    StructureType               sType                                 = StructureType::ePhysicalDeviceMeshShaderPropertiesEXT;
    void *                      pNext                                 = {};
    uint32_t                    maxTaskWorkGroupTotalCount            = {};
    ArrayWrapper1D<uint32_t, 3> maxTaskWorkGroupCount                 = {};
    uint32_t                    maxTaskWorkGroupInvocations           = {};
    ArrayWrapper1D<uint32_t, 3> maxTaskWorkGroupSize                  = {};
    uint32_t                    maxTaskPayloadSize                    = {};
    uint32_t                    maxTaskSharedMemorySize               = {};
    uint32_t                    maxTaskPayloadAndSharedMemorySize     = {};
    uint32_t                    maxMeshWorkGroupTotalCount            = {};
    ArrayWrapper1D<uint32_t, 3> maxMeshWorkGroupCount                 = {};
    uint32_t                    maxMeshWorkGroupInvocations           = {};
    ArrayWrapper1D<uint32_t, 3> maxMeshWorkGroupSize                  = {};
    uint32_t                    maxMeshSharedMemorySize               = {};
    uint32_t                    maxMeshPayloadAndSharedMemorySize     = {};
    uint32_t                    maxMeshOutputMemorySize               = {};
    uint32_t                    maxMeshPayloadAndOutputMemorySize     = {};
    uint32_t                    maxMeshOutputComponents               = {};
    uint32_t                    maxMeshOutputVertices                 = {};
    uint32_t                    maxMeshOutputPrimitives               = {};
    uint32_t                    maxMeshOutputLayers                   = {};
    uint32_t                    maxMeshMultiviewViewCount             = {};
    uint32_t                    meshOutputPerVertexGranularity        = {};
    uint32_t                    meshOutputPerPrimitiveGranularity     = {};
    uint32_t                    maxPreferredTaskWorkGroupInvocations  = {};
    uint32_t                    maxPreferredMeshWorkGroupInvocations  = {};
    Bool32                      prefersLocalInvocationVertexOutput    = {};
    Bool32                      prefersLocalInvocationPrimitiveOutput = {};
    Bool32                      prefersCompactVertexOutput            = {};
    Bool32                      prefersCompactPrimitiveOutput         = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper.
  template <>
  struct CppType<VkPhysicalDeviceMeshShaderPropertiesEXT>
  {
    using Type = PhysicalDeviceMeshShaderPropertiesEXT;
  };
#endif

  // Trait mapping the StructureType enumerant to its C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderPropertiesEXT>
  {
    using Type = PhysicalDeviceMeshShaderPropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceMeshShaderPropertiesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMeshShaderPropertiesNV.html
  struct PhysicalDeviceMeshShaderPropertiesNV
  {
    using NativeType = VkPhysicalDeviceMeshShaderPropertiesNV;

    // Chain metadata: allowDuplicate == false, and structureType identifies this struct's sType.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor: every limit defaults to zero-initialized and pNext
    // (the last parameter) defaults to nullptr.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( uint32_t                        maxDrawMeshTasksCount_             = {},
                                                                  uint32_t                        maxTaskWorkGroupInvocations_       = {},
                                                                  std::array<uint32_t, 3> const & maxTaskWorkGroupSize_              = {},
                                                                  uint32_t                        maxTaskTotalMemorySize_            = {},
                                                                  uint32_t                        maxTaskOutputCount_                = {},
                                                                  uint32_t                        maxMeshWorkGroupInvocations_       = {},
                                                                  std::array<uint32_t, 3> const & maxMeshWorkGroupSize_              = {},
                                                                  uint32_t                        maxMeshTotalMemorySize_            = {},
                                                                  uint32_t                        maxMeshOutputVertices_             = {},
                                                                  uint32_t                        maxMeshOutputPrimitives_           = {},
                                                                  uint32_t                        maxMeshMultiviewViewCount_         = {},
                                                                  uint32_t                        meshOutputPerVertexGranularity_    = {},
                                                                  uint32_t                        meshOutputPerPrimitiveGranularity_ = {},
                                                                  void *                          pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxDrawMeshTasksCount{ maxDrawMeshTasksCount_ }
      , maxTaskWorkGroupInvocations{ maxTaskWorkGroupInvocations_ }
      , maxTaskWorkGroupSize{ maxTaskWorkGroupSize_ }
      , maxTaskTotalMemorySize{ maxTaskTotalMemorySize_ }
      , maxTaskOutputCount{ maxTaskOutputCount_ }
      , maxMeshWorkGroupInvocations{ maxMeshWorkGroupInvocations_ }
      , maxMeshWorkGroupSize{ maxMeshWorkGroupSize_ }
      , maxMeshTotalMemorySize{ maxMeshTotalMemorySize_ }
      , maxMeshOutputVertices{ maxMeshOutputVertices_ }
      , maxMeshOutputPrimitives{ maxMeshOutputPrimitives_ }
      , maxMeshMultiviewViewCount{ maxMeshMultiviewViewCount_ }
      , meshOutputPerVertexGranularity{ meshOutputPerVertexGranularity_ }
      , meshOutputPerPrimitiveGranularity{ meshOutputPerPrimitiveGranularity_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct via the layout-compatible wrapper view.
    PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMeshShaderPropertiesNV( *reinterpret_cast<PhysicalDeviceMeshShaderPropertiesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceMeshShaderPropertiesNV & operator=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct. This is a properties (output) struct, so no
    // fluent setters are generated for it.
    PhysicalDeviceMeshShaderPropertiesNV & operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMeshShaderPropertiesNV const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native Vulkan type, by reference and by pointer.
    operator VkPhysicalDeviceMeshShaderPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMeshShaderPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceMeshShaderPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceMeshShaderPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMeshShaderPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceMeshShaderPropertiesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, in declaration order (used by the
    // reflection-based equality comparison below).
    std::tuple<StructureType const &,
               void * const &,
               uint32_t const &,
               uint32_t const &,
               ArrayWrapper1D<uint32_t, 3> const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               ArrayWrapper1D<uint32_t, 3> const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       maxDrawMeshTasksCount,
                       maxTaskWorkGroupInvocations,
                       maxTaskWorkGroupSize,
                       maxTaskTotalMemorySize,
                       maxTaskOutputCount,
                       maxMeshWorkGroupInvocations,
                       maxMeshWorkGroupSize,
                       maxMeshTotalMemorySize,
                       maxMeshOutputVertices,
                       maxMeshOutputPrimitives,
                       maxMeshMultiviewViewCount,
                       meshOutputPerVertexGranularity,
                       meshOutputPerPrimitiveGranularity );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // With C++20 three-way comparison available, all comparisons are defaulted.
    auto operator<=>( PhysicalDeviceMeshShaderPropertiesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      // Member-wise equality over every field, including sType and the raw pNext pointer.
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount ) &&
             ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations ) && ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize ) &&
             ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize ) && ( maxTaskOutputCount == rhs.maxTaskOutputCount ) &&
             ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations ) && ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize ) &&
             ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize ) && ( maxMeshOutputVertices == rhs.maxMeshOutputVertices ) &&
             ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives ) && ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount ) &&
             ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity ) &&
             ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity );
#  endif
    }

    bool operator!=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceMeshShaderPropertiesNV; the reinterpret_cast-based
    // conversions above rely on this exact layout and order.
    StructureType               sType                             = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
    void *                      pNext                             = {};
    uint32_t                    maxDrawMeshTasksCount             = {};
    uint32_t                    maxTaskWorkGroupInvocations       = {};
    ArrayWrapper1D<uint32_t, 3> maxTaskWorkGroupSize              = {};
    uint32_t                    maxTaskTotalMemorySize            = {};
    uint32_t                    maxTaskOutputCount                = {};
    uint32_t                    maxMeshWorkGroupInvocations       = {};
    ArrayWrapper1D<uint32_t, 3> maxMeshWorkGroupSize              = {};
    uint32_t                    maxMeshTotalMemorySize            = {};
    uint32_t                    maxMeshOutputVertices             = {};
    uint32_t                    maxMeshOutputPrimitives           = {};
    uint32_t                    maxMeshMultiviewViewCount         = {};
    uint32_t                    meshOutputPerVertexGranularity    = {};
    uint32_t                    meshOutputPerPrimitiveGranularity = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper.
  template <>
  struct CppType<VkPhysicalDeviceMeshShaderPropertiesNV>
  {
    using Type = PhysicalDeviceMeshShaderPropertiesNV;
  };
#endif

  // Trait mapping the StructureType enumerant to its C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderPropertiesNV>
  {
    using Type = PhysicalDeviceMeshShaderPropertiesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceMultiDrawFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiDrawFeaturesEXT.html
  struct PhysicalDeviceMultiDrawFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceMultiDrawFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMultiDrawFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawFeaturesEXT( Bool32 multiDraw_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , multiDraw{ multiDraw_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawFeaturesEXT( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMultiDrawFeaturesEXT( VkPhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMultiDrawFeaturesEXT( *reinterpret_cast<PhysicalDeviceMultiDrawFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceMultiDrawFeaturesEXT & operator=( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceMultiDrawFeaturesEXT & operator=( VkPhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMultiDrawFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiDrawFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiDrawFeaturesEXT & setMultiDraw( Bool32 multiDraw_ ) VULKAN_HPP_NOEXCEPT
    {
      multiDraw = multiDraw_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPhysicalDeviceMultiDrawFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMultiDrawFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceMultiDrawFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMultiDrawFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceMultiDrawFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMultiDrawFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceMultiDrawFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMultiDrawFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, multiDraw );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMultiDrawFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiDraw == rhs.multiDraw );
#  endif
    }

    bool operator!=( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType     = StructureType::ePhysicalDeviceMultiDrawFeaturesEXT;
    void *        pNext     = {};
    Bool32        multiDraw = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper.
  template <>
  struct CppType<VkPhysicalDeviceMultiDrawFeaturesEXT>
  {
    using Type = PhysicalDeviceMultiDrawFeaturesEXT;
  };
#endif

  // Trait mapping the StructureType enumerant to its C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiDrawFeaturesEXT>
  {
    using Type = PhysicalDeviceMultiDrawFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceMultiDrawPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiDrawPropertiesEXT.html
  struct PhysicalDeviceMultiDrawPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceMultiDrawPropertiesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMultiDrawPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; the limit defaults to zero and pNext to nullptr.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT( uint32_t maxMultiDrawCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxMultiDrawCount{ maxMultiDrawCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct via the layout-compatible wrapper view.
    PhysicalDeviceMultiDrawPropertiesEXT( VkPhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMultiDrawPropertiesEXT( *reinterpret_cast<PhysicalDeviceMultiDrawPropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceMultiDrawPropertiesEXT & operator=( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct. This is a properties (output) struct, so no
    // fluent setters are generated for it.
    PhysicalDeviceMultiDrawPropertiesEXT & operator=( VkPhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMultiDrawPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceMultiDrawPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMultiDrawPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceMultiDrawPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMultiDrawPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceMultiDrawPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMultiDrawPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceMultiDrawPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMultiDrawPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxMultiDrawCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMultiDrawPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxMultiDrawCount == rhs.maxMultiDrawCount );
#  endif
    }

    bool operator!=( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType             = StructureType::ePhysicalDeviceMultiDrawPropertiesEXT;
    void *        pNext             = {};
    uint32_t      maxMultiDrawCount = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type back to this C++ wrapper (this CppType form is only provided for C++20 and up).
  template <>
  struct CppType<VkPhysicalDeviceMultiDrawPropertiesEXT>
  {
    using Type = PhysicalDeviceMultiDrawPropertiesEXT;
  };
#endif

  // Maps the sType enumerant to the C++ wrapper type, so generic code can recover the wrapper from a StructureType value.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiDrawPropertiesEXT>
  {
    using Type = PhysicalDeviceMultiDrawPropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT.html
  struct PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;

    // Static metadata consumed by generic vulkan.hpp code outside this chunk (e.g. StructureChain):
    // the sType this wrapper always carries, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( Bool32 multisampledRenderToSingleSampled_ = {},
                                                                                     void * pNext_                             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , multisampledRenderToSingleSampled{ multisampledRenderToSingleSampled_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with it.
    PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT &
      operator=( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT &
      operator=( VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters (builder style); each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT &
      setMultisampledRenderToSingleSampled( Bool32 multisampledRenderToSingleSampled_ ) VULKAN_HPP_NOEXCEPT
    {
      multisampledRenderToSingleSampled = multisampledRenderToSingleSampled_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference/pointer conversions so the wrapper can be passed directly to C Vulkan entry points.
    operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members; enables the reflection-based comparison in operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, multisampledRenderToSingleSampled );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multisampledRenderToSingleSampled == rhs.multisampledRenderToSingleSampled );
#  endif
    }

    bool operator!=( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror the native C struct; the reinterpret_casts above depend on this layout.
    StructureType sType                             = StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
    void *        pNext                             = {};
    Bool32        multisampledRenderToSingleSampled = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type back to this C++ wrapper (this CppType form is only provided for C++20 and up).
  template <>
  struct CppType<VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT>
  {
    using Type = PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
  };
#endif

  // Maps the sType enumerant to the C++ wrapper type, so generic code can recover the wrapper from a StructureType value.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT>
  {
    using Type = PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceMultiviewFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiviewFeatures.html
  struct PhysicalDeviceMultiviewFeatures
  {
    using NativeType = VkPhysicalDeviceMultiviewFeatures;

    // Static metadata consumed by generic vulkan.hpp code outside this chunk (e.g. StructureChain):
    // the sType this wrapper always carries, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMultiviewFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( Bool32 multiview_                   = {},
                                                          Bool32 multiviewGeometryShader_     = {},
                                                          Bool32 multiviewTessellationShader_ = {},
                                                          void * pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , multiview{ multiview_ }
      , multiviewGeometryShader{ multiviewGeometryShader_ }
      , multiviewTessellationShader{ multiviewTessellationShader_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with it.
    PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMultiviewFeatures( *reinterpret_cast<PhysicalDeviceMultiviewFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceMultiviewFeatures & operator=( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    PhysicalDeviceMultiviewFeatures & operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMultiviewFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters (builder style); each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setMultiview( Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
    {
      multiview = multiview_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setMultiviewGeometryShader( Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
    {
      multiviewGeometryShader = multiviewGeometryShader_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setMultiviewTessellationShader( Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
    {
      multiviewTessellationShader = multiviewTessellationShader_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference/pointer conversions so the wrapper can be passed directly to C Vulkan entry points.
    operator VkPhysicalDeviceMultiviewFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures *>( this );
    }

    operator VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMultiviewFeatures *>( this );
    }

    operator VkPhysicalDeviceMultiviewFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures *>( this );
    }

    operator VkPhysicalDeviceMultiviewFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMultiviewFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members; enables the reflection-based comparison in operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, multiview, multiviewGeometryShader, multiviewTessellationShader );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMultiviewFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiview == rhs.multiview ) && ( multiviewGeometryShader == rhs.multiviewGeometryShader ) &&
             ( multiviewTessellationShader == rhs.multiviewTessellationShader );
#  endif
    }

    bool operator!=( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPhysicalDeviceMultiviewFeatures; the reinterpret_casts above depend on this layout.
    StructureType sType                       = StructureType::ePhysicalDeviceMultiviewFeatures;
    void *        pNext                       = {};
    Bool32        multiview                   = {};
    Bool32        multiviewGeometryShader     = {};
    Bool32        multiviewTessellationShader = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type back to this C++ wrapper (this CppType form is only provided for C++20 and up).
  template <>
  struct CppType<VkPhysicalDeviceMultiviewFeatures>
  {
    using Type = PhysicalDeviceMultiviewFeatures;
  };
#endif

  // Maps the sType enumerant to the C++ wrapper type, so generic code can recover the wrapper from a StructureType value.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewFeatures>
  {
    using Type = PhysicalDeviceMultiviewFeatures;
  };

  // Backwards-compatibility alias for code still using the VK_KHR_multiview extension naming.
  using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures;

  // wrapper struct for struct VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX.html
  struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
  {
    using NativeType = VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;

    // Static metadata consumed by generic vulkan.hpp code outside this chunk (e.g. StructureChain):
    // the sType this wrapper always carries, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( Bool32 perViewPositionAllComponents_ = {},
                                                                                void * pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , perViewPositionAllComponents{ perViewPositionAllComponents_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with it.
    PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( *reinterpret_cast<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>( &rhs ) )
    {
    }

    PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &
      operator=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>( &rhs );
      return *this;
    }

    // No setters are generated here — consistent with the other read-only properties structs in this file.
    // Implicit reference/pointer conversions so the wrapper can be passed directly to C Vulkan entry points.
    operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX *>( this );
    }

    operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX *>( this );
    }

    operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX *>( this );
    }

    operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members; enables the reflection-based comparison in operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, perViewPositionAllComponents );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & ) const = default;
#else
    bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( perViewPositionAllComponents == rhs.perViewPositionAllComponents );
#  endif
    }

    bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror the native C struct; the reinterpret_casts above depend on this layout.
    StructureType sType                        = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
    void *        pNext                        = {};
    Bool32        perViewPositionAllComponents = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type back to this C++ wrapper (this CppType form is only provided for C++20 and up).
  template <>
  struct CppType<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>
  {
    using Type = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
  };
#endif

  // Maps the sType enumerant to the C++ wrapper type, so generic code can recover the wrapper from a StructureType value.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>
  {
    using Type = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
  };

  // wrapper struct for struct VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM.html
  struct PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM
  {
    using NativeType = VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM;

    // Static metadata consumed by generic vulkan.hpp code outside this chunk (e.g. StructureChain):
    // the sType this wrapper always carries, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM( Bool32 multiviewPerViewRenderAreas_ = {},
                                                                                void * pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , multiviewPerViewRenderAreas{ multiviewPerViewRenderAreas_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with it.
    PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM( VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM( *reinterpret_cast<PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const *>( &rhs ) )
    {
    }

    PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM &
      operator=( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM & operator=( VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters (builder style); each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM &
      setMultiviewPerViewRenderAreas( Bool32 multiviewPerViewRenderAreas_ ) VULKAN_HPP_NOEXCEPT
    {
      multiviewPerViewRenderAreas = multiviewPerViewRenderAreas_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference/pointer conversions so the wrapper can be passed directly to C Vulkan entry points.
    operator VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members; enables the reflection-based comparison in operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, multiviewPerViewRenderAreas );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & ) const = default;
#else
    bool operator==( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiviewPerViewRenderAreas == rhs.multiviewPerViewRenderAreas );
#  endif
    }

    bool operator!=( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror the native C struct; the reinterpret_casts above depend on this layout.
    StructureType sType                       = StructureType::ePhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM;
    void *        pNext                       = {};
    Bool32        multiviewPerViewRenderAreas = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type back to this C++ wrapper (this CppType form is only provided for C++20 and up).
  template <>
  struct CppType<VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM>
  {
    using Type = PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM;
  };
#endif

  // Maps the sType enumerant to the C++ wrapper type, so generic code can recover the wrapper from a StructureType value.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM>
  {
    using Type = PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM;
  };

  // wrapper struct for struct VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM.html
  struct PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM
  {
    using NativeType = VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM;

    // Static metadata consumed by generic vulkan.hpp code outside this chunk (e.g. StructureChain):
    // the sType this wrapper always carries, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( Bool32 multiviewPerViewViewports_ = {},
                                                                              void * pNext_                     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , multiviewPerViewViewports{ multiviewPerViewViewports_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with it.
    PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( *reinterpret_cast<PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const *>( &rhs ) )
    {
    }

    PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM &
      operator=( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & operator=( VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters (builder style); each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM &
      setMultiviewPerViewViewports( Bool32 multiviewPerViewViewports_ ) VULKAN_HPP_NOEXCEPT
    {
      multiviewPerViewViewports = multiviewPerViewViewports_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference/pointer conversions so the wrapper can be passed directly to C Vulkan entry points.
    operator VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members; enables the reflection-based comparison in operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, multiviewPerViewViewports );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & ) const = default;
#else
    bool operator==( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiviewPerViewViewports == rhs.multiviewPerViewViewports );
#  endif
    }

    bool operator!=( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror the native C struct; the reinterpret_casts above depend on this layout.
    StructureType sType                     = StructureType::ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM;
    void *        pNext                     = {};
    Bool32        multiviewPerViewViewports = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type back to this C++ wrapper (this CppType form is only provided for C++20 and up).
  template <>
  struct CppType<VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM>
  {
    using Type = PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM;
  };
#endif

  // Maps the sType enumerant to the C++ wrapper type, so generic code can recover the wrapper from a StructureType value.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM>
  {
    using Type = PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM;
  };

  // wrapper struct for struct VkPhysicalDeviceMultiviewProperties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiviewProperties.html
  struct PhysicalDeviceMultiviewProperties
  {
    using NativeType = VkPhysicalDeviceMultiviewProperties;

    // Static metadata consumed by generic vulkan.hpp code outside this chunk (e.g. StructureChain):
    // the sType this wrapper always carries, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMultiviewProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( uint32_t maxMultiviewViewCount_     = {},
                                                            uint32_t maxMultiviewInstanceIndex_ = {},
                                                            void *   pNext_                     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxMultiviewViewCount{ maxMultiviewViewCount_ }
      , maxMultiviewInstanceIndex{ maxMultiviewInstanceIndex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with it.
    PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMultiviewProperties( *reinterpret_cast<PhysicalDeviceMultiviewProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceMultiviewProperties & operator=( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    PhysicalDeviceMultiviewProperties & operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMultiviewProperties const *>( &rhs );
      return *this;
    }

    // No setters are generated here — consistent with the other read-only properties structs in this file.
    // Implicit reference/pointer conversions so the wrapper can be passed directly to C Vulkan entry points.
    operator VkPhysicalDeviceMultiviewProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMultiviewProperties *>( this );
    }

    operator VkPhysicalDeviceMultiviewProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMultiviewProperties *>( this );
    }

    operator VkPhysicalDeviceMultiviewProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMultiviewProperties *>( this );
    }

    operator VkPhysicalDeviceMultiviewProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMultiviewProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members; enables the reflection-based comparison in operator== below.
    std::tuple<StructureType const &, void * const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxMultiviewViewCount, maxMultiviewInstanceIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMultiviewProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) &&
             ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex );
#  endif
    }

    bool operator!=( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPhysicalDeviceMultiviewProperties; the reinterpret_casts above depend on this layout.
    StructureType sType                     = StructureType::ePhysicalDeviceMultiviewProperties;
    void *        pNext                     = {};
    uint32_t      maxMultiviewViewCount     = {};
    uint32_t      maxMultiviewInstanceIndex = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type back to this C++ wrapper (this CppType form is only provided for C++20 and up).
  template <>
  struct CppType<VkPhysicalDeviceMultiviewProperties>
  {
    using Type = PhysicalDeviceMultiviewProperties;
  };
#endif

  // Maps the sType enumerant to the C++ wrapper type, so generic code can recover the wrapper from a StructureType value.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewProperties>
  {
    using Type = PhysicalDeviceMultiviewProperties;
  };

  // Backwards-compatibility alias for code still using the VK_KHR_multiview extension naming.
  using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties;

  // wrapper struct for struct VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT.html
  struct PhysicalDeviceMutableDescriptorTypeFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT;

    // Compile-time metadata for structure-chain handling: the sType enumerant for this
    // struct, and whether it may appear more than once in the same pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and is not a parameter.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesEXT( Bool32 mutableDescriptorType_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , mutableDescriptorType{ mutableDescriptorType_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceMutableDescriptorTypeFeaturesEXT( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-identical to it.
    PhysicalDeviceMutableDescriptorTypeFeaturesEXT( VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMutableDescriptorTypeFeaturesEXT( *reinterpret_cast<PhysicalDeviceMutableDescriptorTypeFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceMutableDescriptorTypeFeaturesEXT & operator=( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-identity assumption as above).
    PhysicalDeviceMutableDescriptorTypeFeaturesEXT & operator=( VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceMutableDescriptorTypeFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters returning *this to allow chained construction.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesEXT & setMutableDescriptorType( Bool32 mutableDescriptorType_ ) VULKAN_HPP_NOEXCEPT
    {
      mutableDescriptorType = mutableDescriptorType_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost implicit conversions to the native C struct (reference and pointer forms).
    operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, mutableDescriptorType );
    }
#endif

    // Member-wise equality; a defaulted three-way comparison is used when available.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mutableDescriptorType == rhs.mutableDescriptorType );
#  endif
    }

    bool operator!=( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT; order and types must not
    // change, or the reinterpret_casts above become invalid.
    StructureType sType                 = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT;
    void *        pNext                 = {};
    Bool32        mutableDescriptorType = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct type to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT>
  {
    using Type = PhysicalDeviceMutableDescriptorTypeFeaturesEXT;
  };
#endif

  // Trait mapping the StructureType enumerant back to the wrapper type that carries it.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT>
  {
    using Type = PhysicalDeviceMutableDescriptorTypeFeaturesEXT;
  };

  // Backward-compatible alias for the VALVE-suffixed extension name of this struct.
  using PhysicalDeviceMutableDescriptorTypeFeaturesVALVE = PhysicalDeviceMutableDescriptorTypeFeaturesEXT;

  // wrapper struct for struct VkPhysicalDeviceNestedCommandBufferFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceNestedCommandBufferFeaturesEXT.html
  struct PhysicalDeviceNestedCommandBufferFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceNestedCommandBufferFeaturesEXT;

    // Compile-time metadata for structure-chain handling: the sType enumerant for this
    // struct, and whether it may appear more than once in the same pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceNestedCommandBufferFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and is not a parameter.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceNestedCommandBufferFeaturesEXT( Bool32 nestedCommandBuffer_                = {},
                                                                       Bool32 nestedCommandBufferRendering_       = {},
                                                                       Bool32 nestedCommandBufferSimultaneousUse_ = {},
                                                                       void * pNext_                              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , nestedCommandBuffer{ nestedCommandBuffer_ }
      , nestedCommandBufferRendering{ nestedCommandBufferRendering_ }
      , nestedCommandBufferSimultaneousUse{ nestedCommandBufferSimultaneousUse_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceNestedCommandBufferFeaturesEXT( PhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-identical to it.
    PhysicalDeviceNestedCommandBufferFeaturesEXT( VkPhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceNestedCommandBufferFeaturesEXT( *reinterpret_cast<PhysicalDeviceNestedCommandBufferFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceNestedCommandBufferFeaturesEXT & operator=( PhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-identity assumption as above).
    PhysicalDeviceNestedCommandBufferFeaturesEXT & operator=( VkPhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceNestedCommandBufferFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters returning *this to allow chained construction.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT & setNestedCommandBuffer( Bool32 nestedCommandBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      nestedCommandBuffer = nestedCommandBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT &
      setNestedCommandBufferRendering( Bool32 nestedCommandBufferRendering_ ) VULKAN_HPP_NOEXCEPT
    {
      nestedCommandBufferRendering = nestedCommandBufferRendering_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT &
      setNestedCommandBufferSimultaneousUse( Bool32 nestedCommandBufferSimultaneousUse_ ) VULKAN_HPP_NOEXCEPT
    {
      nestedCommandBufferSimultaneousUse = nestedCommandBufferSimultaneousUse_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost implicit conversions to the native C struct (reference and pointer forms).
    operator VkPhysicalDeviceNestedCommandBufferFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceNestedCommandBufferFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceNestedCommandBufferFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceNestedCommandBufferFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceNestedCommandBufferFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceNestedCommandBufferFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceNestedCommandBufferFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceNestedCommandBufferFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, nestedCommandBuffer, nestedCommandBufferRendering, nestedCommandBufferSimultaneousUse );
    }
#endif

    // Member-wise equality; a defaulted three-way comparison is used when available.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceNestedCommandBufferFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( nestedCommandBuffer == rhs.nestedCommandBuffer ) &&
             ( nestedCommandBufferRendering == rhs.nestedCommandBufferRendering ) &&
             ( nestedCommandBufferSimultaneousUse == rhs.nestedCommandBufferSimultaneousUse );
#  endif
    }

    bool operator!=( PhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceNestedCommandBufferFeaturesEXT; order and types must not
    // change, or the reinterpret_casts above become invalid.
    StructureType sType                              = StructureType::ePhysicalDeviceNestedCommandBufferFeaturesEXT;
    void *        pNext                              = {};
    Bool32        nestedCommandBuffer                = {};
    Bool32        nestedCommandBufferRendering       = {};
    Bool32        nestedCommandBufferSimultaneousUse = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct type to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceNestedCommandBufferFeaturesEXT>
  {
    using Type = PhysicalDeviceNestedCommandBufferFeaturesEXT;
  };
#endif

  // Trait mapping the StructureType enumerant back to the wrapper type that carries it.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceNestedCommandBufferFeaturesEXT>
  {
    using Type = PhysicalDeviceNestedCommandBufferFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceNestedCommandBufferPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceNestedCommandBufferPropertiesEXT.html
  struct PhysicalDeviceNestedCommandBufferPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceNestedCommandBufferPropertiesEXT;

    // Compile-time metadata for structure-chain handling: the sType enumerant for this
    // struct, and whether it may appear more than once in the same pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceNestedCommandBufferPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and is not a parameter.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceNestedCommandBufferPropertiesEXT( uint32_t maxCommandBufferNestingLevel_ = {},
                                                                         void *   pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxCommandBufferNestingLevel{ maxCommandBufferNestingLevel_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceNestedCommandBufferPropertiesEXT( PhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-identical to it.
    PhysicalDeviceNestedCommandBufferPropertiesEXT( VkPhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceNestedCommandBufferPropertiesEXT( *reinterpret_cast<PhysicalDeviceNestedCommandBufferPropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceNestedCommandBufferPropertiesEXT & operator=( PhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-identity assumption as above).
    // Note: no setters are generated for this struct — it is a query-only properties struct.
    PhysicalDeviceNestedCommandBufferPropertiesEXT & operator=( VkPhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceNestedCommandBufferPropertiesEXT const *>( &rhs );
      return *this;
    }

    // Zero-cost implicit conversions to the native C struct (reference and pointer forms).
    operator VkPhysicalDeviceNestedCommandBufferPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceNestedCommandBufferPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceNestedCommandBufferPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceNestedCommandBufferPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceNestedCommandBufferPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceNestedCommandBufferPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceNestedCommandBufferPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceNestedCommandBufferPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxCommandBufferNestingLevel );
    }
#endif

    // Member-wise equality; a defaulted three-way comparison is used when available.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceNestedCommandBufferPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxCommandBufferNestingLevel == rhs.maxCommandBufferNestingLevel );
#  endif
    }

    bool operator!=( PhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceNestedCommandBufferPropertiesEXT; order and types must not
    // change, or the reinterpret_casts above become invalid.
    StructureType sType                        = StructureType::ePhysicalDeviceNestedCommandBufferPropertiesEXT;
    void *        pNext                        = {};
    uint32_t      maxCommandBufferNestingLevel = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct type to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceNestedCommandBufferPropertiesEXT>
  {
    using Type = PhysicalDeviceNestedCommandBufferPropertiesEXT;
  };
#endif

  // Trait mapping the StructureType enumerant back to the wrapper type that carries it.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceNestedCommandBufferPropertiesEXT>
  {
    using Type = PhysicalDeviceNestedCommandBufferPropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT.html
  struct PhysicalDeviceNonSeamlessCubeMapFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT;

    // Compile-time metadata for structure-chain handling: the sType enumerant for this
    // struct, and whether it may appear more than once in the same pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and is not a parameter.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( Bool32 nonSeamlessCubeMap_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , nonSeamlessCubeMap{ nonSeamlessCubeMap_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-identical to it.
    PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( *reinterpret_cast<PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & operator=( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-identity assumption as above).
    PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & operator=( VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters returning *this to allow chained construction.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & setNonSeamlessCubeMap( Bool32 nonSeamlessCubeMap_ ) VULKAN_HPP_NOEXCEPT
    {
      nonSeamlessCubeMap = nonSeamlessCubeMap_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost implicit conversions to the native C struct (reference and pointer forms).
    operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, nonSeamlessCubeMap );
    }
#endif

    // Member-wise equality; a defaulted three-way comparison is used when available.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( nonSeamlessCubeMap == rhs.nonSeamlessCubeMap );
#  endif
    }

    bool operator!=( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT; order and types must not
    // change, or the reinterpret_casts above become invalid.
    StructureType sType              = StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT;
    void *        pNext              = {};
    Bool32        nonSeamlessCubeMap = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct type to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT>
  {
    using Type = PhysicalDeviceNonSeamlessCubeMapFeaturesEXT;
  };
#endif

  // Trait mapping the StructureType enumerant back to the wrapper type that carries it.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT>
  {
    using Type = PhysicalDeviceNonSeamlessCubeMapFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceOpacityMicromapFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceOpacityMicromapFeaturesEXT.html
  struct PhysicalDeviceOpacityMicromapFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceOpacityMicromapFeaturesEXT;

    // Compile-time metadata for structure-chain handling: the sType enumerant for this
    // struct, and whether it may appear more than once in the same pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and is not a parameter.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapFeaturesEXT( Bool32 micromap_              = {},
                                                                   Bool32 micromapCaptureReplay_ = {},
                                                                   Bool32 micromapHostCommands_  = {},
                                                                   void * pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , micromap{ micromap_ }
      , micromapCaptureReplay{ micromapCaptureReplay_ }
      , micromapHostCommands{ micromapHostCommands_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapFeaturesEXT( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-identical to it.
    PhysicalDeviceOpacityMicromapFeaturesEXT( VkPhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceOpacityMicromapFeaturesEXT( *reinterpret_cast<PhysicalDeviceOpacityMicromapFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceOpacityMicromapFeaturesEXT & operator=( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-identity assumption as above).
    PhysicalDeviceOpacityMicromapFeaturesEXT & operator=( VkPhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceOpacityMicromapFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters returning *this to allow chained construction.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & setMicromap( Bool32 micromap_ ) VULKAN_HPP_NOEXCEPT
    {
      micromap = micromap_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & setMicromapCaptureReplay( Bool32 micromapCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
    {
      micromapCaptureReplay = micromapCaptureReplay_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & setMicromapHostCommands( Bool32 micromapHostCommands_ ) VULKAN_HPP_NOEXCEPT
    {
      micromapHostCommands = micromapHostCommands_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost implicit conversions to the native C struct (reference and pointer forms).
    operator VkPhysicalDeviceOpacityMicromapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceOpacityMicromapFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceOpacityMicromapFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceOpacityMicromapFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceOpacityMicromapFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceOpacityMicromapFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceOpacityMicromapFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceOpacityMicromapFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, micromap, micromapCaptureReplay, micromapHostCommands );
    }
#endif

    // Member-wise equality; a defaulted three-way comparison is used when available.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceOpacityMicromapFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( micromap == rhs.micromap ) && ( micromapCaptureReplay == rhs.micromapCaptureReplay ) &&
             ( micromapHostCommands == rhs.micromapHostCommands );
#  endif
    }

    bool operator!=( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceOpacityMicromapFeaturesEXT; order and types must not
    // change, or the reinterpret_casts above become invalid.
    StructureType sType                 = StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT;
    void *        pNext                 = {};
    Bool32        micromap              = {};
    Bool32        micromapCaptureReplay = {};
    Bool32        micromapHostCommands  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct type to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceOpacityMicromapFeaturesEXT>
  {
    using Type = PhysicalDeviceOpacityMicromapFeaturesEXT;
  };
#endif

  // Trait mapping the StructureType enumerant back to the wrapper type that carries it.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT>
  {
    using Type = PhysicalDeviceOpacityMicromapFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceOpacityMicromapPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceOpacityMicromapPropertiesEXT.html
  struct PhysicalDeviceOpacityMicromapPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceOpacityMicromapPropertiesEXT;

    // Compile-time metadata for structure-chain handling: the sType enumerant for this
    // struct, and whether it may appear more than once in the same pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and is not a parameter.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapPropertiesEXT( uint32_t maxOpacity2StateSubdivisionLevel_ = {},
                                                                     uint32_t maxOpacity4StateSubdivisionLevel_ = {},
                                                                     void *   pNext_                            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxOpacity2StateSubdivisionLevel{ maxOpacity2StateSubdivisionLevel_ }
      , maxOpacity4StateSubdivisionLevel{ maxOpacity4StateSubdivisionLevel_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapPropertiesEXT( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-identical to it.
    PhysicalDeviceOpacityMicromapPropertiesEXT( VkPhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceOpacityMicromapPropertiesEXT( *reinterpret_cast<PhysicalDeviceOpacityMicromapPropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceOpacityMicromapPropertiesEXT & operator=( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-identity assumption as above).
    // Note: no setters are generated for this struct — it is a query-only properties struct.
    PhysicalDeviceOpacityMicromapPropertiesEXT & operator=( VkPhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceOpacityMicromapPropertiesEXT const *>( &rhs );
      return *this;
    }

    // Zero-cost implicit conversions to the native C struct (reference and pointer forms).
    operator VkPhysicalDeviceOpacityMicromapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceOpacityMicromapPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceOpacityMicromapPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceOpacityMicromapPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceOpacityMicromapPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceOpacityMicromapPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceOpacityMicromapPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceOpacityMicromapPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxOpacity2StateSubdivisionLevel, maxOpacity4StateSubdivisionLevel );
    }
#endif

    // Member-wise equality; a defaulted three-way comparison is used when available.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceOpacityMicromapPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxOpacity2StateSubdivisionLevel == rhs.maxOpacity2StateSubdivisionLevel ) &&
             ( maxOpacity4StateSubdivisionLevel == rhs.maxOpacity4StateSubdivisionLevel );
#  endif
    }

    bool operator!=( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceOpacityMicromapPropertiesEXT; order and types must not
    // change, or the reinterpret_casts above become invalid.
    StructureType sType                            = StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT;
    void *        pNext                            = {};
    uint32_t      maxOpacity2StateSubdivisionLevel = {};
    uint32_t      maxOpacity4StateSubdivisionLevel = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct type to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceOpacityMicromapPropertiesEXT>
  {
    using Type = PhysicalDeviceOpacityMicromapPropertiesEXT;
  };
#endif

  // Trait mapping the StructureType enumerant back to the wrapper type that carries it.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT>
  {
    using Type = PhysicalDeviceOpacityMicromapPropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceOpticalFlowFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceOpticalFlowFeaturesNV.html
  struct PhysicalDeviceOpticalFlowFeaturesNV
  {
    using NativeType = VkPhysicalDeviceOpticalFlowFeaturesNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceOpticalFlowFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowFeaturesNV( Bool32 opticalFlow_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , opticalFlow{ opticalFlow_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowFeaturesNV( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceOpticalFlowFeaturesNV( VkPhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceOpticalFlowFeaturesNV( *reinterpret_cast<PhysicalDeviceOpticalFlowFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceOpticalFlowFeaturesNV & operator=( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceOpticalFlowFeaturesNV & operator=( VkPhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceOpticalFlowFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpticalFlowFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpticalFlowFeaturesNV & setOpticalFlow( Bool32 opticalFlow_ ) VULKAN_HPP_NOEXCEPT
    {
      opticalFlow = opticalFlow_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPhysicalDeviceOpticalFlowFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceOpticalFlowFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceOpticalFlowFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceOpticalFlowFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceOpticalFlowFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceOpticalFlowFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceOpticalFlowFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceOpticalFlowFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, opticalFlow );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceOpticalFlowFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opticalFlow == rhs.opticalFlow );
#  endif
    }

    bool operator!=( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType       = StructureType::ePhysicalDeviceOpticalFlowFeaturesNV;
    void *        pNext       = {};
    Bool32        opticalFlow = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C API type VkPhysicalDeviceOpticalFlowFeaturesNV to its C++ wrapper
  // (only emitted when compiling as C++20 or newer).
  template <>
  struct CppType<VkPhysicalDeviceOpticalFlowFeaturesNV>
  {
    using Type = PhysicalDeviceOpticalFlowFeaturesNV;
  };
#endif

  // Trait mapping the sType enumerant back to the wrapper struct, for sType-driven lookups.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceOpticalFlowFeaturesNV>
  {
    using Type = PhysicalDeviceOpticalFlowFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceOpticalFlowPropertiesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceOpticalFlowPropertiesNV.html
  // NOTE: no member setters are generated for this struct, matching the pattern used for
  // returned-only property structures (presumably filled in by the implementation — see the registry).
  struct PhysicalDeviceOpticalFlowPropertiesNV
  {
    // C API struct this wrapper converts to/from via reinterpret_cast; the member list below
    // must mirror the C struct's layout exactly for those casts to be valid.
    using NativeType = VkPhysicalDeviceOpticalFlowPropertiesNV;

    // allowDuplicate: whether this struct may appear more than once in a structure chain.
    // structureType: the fixed sType value for this struct.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceOpticalFlowPropertiesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value-initializing constructor; sType is set via the member initializer and never passed in.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowPropertiesNV( OpticalFlowGridSizeFlagsNV supportedOutputGridSizes_   = {},
                                                                OpticalFlowGridSizeFlagsNV supportedHintGridSizes_     = {},
                                                                Bool32                     hintSupported_              = {},
                                                                Bool32                     costSupported_              = {},
                                                                Bool32                     bidirectionalFlowSupported_ = {},
                                                                Bool32                     globalFlowSupported_        = {},
                                                                uint32_t                   minWidth_                   = {},
                                                                uint32_t                   minHeight_                  = {},
                                                                uint32_t                   maxWidth_                   = {},
                                                                uint32_t                   maxHeight_                  = {},
                                                                uint32_t                   maxNumRegionsOfInterest_    = {},
                                                                void *                     pNext_                      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , supportedOutputGridSizes{ supportedOutputGridSizes_ }
      , supportedHintGridSizes{ supportedHintGridSizes_ }
      , hintSupported{ hintSupported_ }
      , costSupported{ costSupported_ }
      , bidirectionalFlowSupported{ bidirectionalFlowSupported_ }
      , globalFlowSupported{ globalFlowSupported_ }
      , minWidth{ minWidth_ }
      , minHeight{ minHeight_ }
      , maxWidth{ maxWidth_ }
      , maxHeight{ maxHeight_ }
      , maxNumRegionsOfInterest{ maxNumRegionsOfInterest_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowPropertiesNV( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the (layout-identical) wrapper.
    PhysicalDeviceOpticalFlowPropertiesNV( VkPhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceOpticalFlowPropertiesNV( *reinterpret_cast<PhysicalDeviceOpticalFlowPropertiesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceOpticalFlowPropertiesNV & operator=( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; defined outside the constructor guard so it is always available.
    PhysicalDeviceOpticalFlowPropertiesNV & operator=( VkPhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceOpticalFlowPropertiesNV const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C struct (reference and pointer, const and non-const),
    // allowing the wrapper to be passed directly to the C API.
    operator VkPhysicalDeviceOpticalFlowPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceOpticalFlowPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceOpticalFlowPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceOpticalFlowPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceOpticalFlowPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceOpticalFlowPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceOpticalFlowPropertiesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceOpticalFlowPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of references, in declaration order (used for comparisons below).
    std::tuple<StructureType const &,
               void * const &,
               OpticalFlowGridSizeFlagsNV const &,
               OpticalFlowGridSizeFlagsNV const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       supportedOutputGridSizes,
                       supportedHintGridSizes,
                       hintSupported,
                       costSupported,
                       bidirectionalFlowSupported,
                       globalFlowSupported,
                       minWidth,
                       minHeight,
                       maxWidth,
                       maxHeight,
                       maxNumRegionsOfInterest );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceOpticalFlowPropertiesNV const & ) const = default;
#else
    // Member-wise equality; note that pNext is compared as a raw pointer value,
    // not by the contents of the chained structures.
    bool operator==( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedOutputGridSizes == rhs.supportedOutputGridSizes ) &&
             ( supportedHintGridSizes == rhs.supportedHintGridSizes ) && ( hintSupported == rhs.hintSupported ) && ( costSupported == rhs.costSupported ) &&
             ( bidirectionalFlowSupported == rhs.bidirectionalFlowSupported ) && ( globalFlowSupported == rhs.globalFlowSupported ) &&
             ( minWidth == rhs.minWidth ) && ( minHeight == rhs.minHeight ) && ( maxWidth == rhs.maxWidth ) && ( maxHeight == rhs.maxHeight ) &&
             ( maxNumRegionsOfInterest == rhs.maxNumRegionsOfInterest );
#  endif
    }

    bool operator!=( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members in the same order as the C struct; sType is pre-set and should not be modified.
    StructureType              sType                      = StructureType::ePhysicalDeviceOpticalFlowPropertiesNV;
    void *                     pNext                      = {};
    OpticalFlowGridSizeFlagsNV supportedOutputGridSizes   = {};
    OpticalFlowGridSizeFlagsNV supportedHintGridSizes     = {};
    Bool32                     hintSupported              = {};
    Bool32                     costSupported              = {};
    Bool32                     bidirectionalFlowSupported = {};
    Bool32                     globalFlowSupported        = {};
    uint32_t                   minWidth                   = {};
    uint32_t                   minHeight                  = {};
    uint32_t                   maxWidth                   = {};
    uint32_t                   maxHeight                  = {};
    uint32_t                   maxNumRegionsOfInterest    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C API type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceOpticalFlowPropertiesNV>
  {
    using Type = PhysicalDeviceOpticalFlowPropertiesNV;
  };
#endif

  // Trait mapping the sType enumerant back to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceOpticalFlowPropertiesNV>
  {
    using Type = PhysicalDeviceOpticalFlowPropertiesNV;
  };

  // wrapper struct for struct VkPhysicalDevicePCIBusInfoPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePCIBusInfoPropertiesEXT.html
  // NOTE: no member setters are generated, matching the pattern used for returned-only
  // property structures (presumably filled in by the implementation — see the registry).
  struct PhysicalDevicePCIBusInfoPropertiesEXT
  {
    // C API struct this wrapper converts to/from via reinterpret_cast; member layout must match exactly.
    using NativeType = VkPhysicalDevicePCIBusInfoPropertiesEXT;

    // allowDuplicate: whether this struct may appear more than once in a structure chain.
    // structureType: the fixed sType value for this struct.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value-initializing constructor; sType is set via the member initializer and never passed in.
    VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT(
      uint32_t pciDomain_ = {}, uint32_t pciBus_ = {}, uint32_t pciDevice_ = {}, uint32_t pciFunction_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pciDomain{ pciDomain_ }
      , pciBus{ pciBus_ }
      , pciDevice{ pciDevice_ }
      , pciFunction{ pciFunction_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the (layout-identical) wrapper.
    PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePCIBusInfoPropertiesEXT( *reinterpret_cast<PhysicalDevicePCIBusInfoPropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDevicePCIBusInfoPropertiesEXT & operator=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; defined outside the constructor guard so it is always available.
    PhysicalDevicePCIBusInfoPropertiesEXT & operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePCIBusInfoPropertiesEXT const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C struct (reference and pointer, const and non-const).
    operator VkPhysicalDevicePCIBusInfoPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePCIBusInfoPropertiesEXT *>( this );
    }

    operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePCIBusInfoPropertiesEXT *>( this );
    }

    operator VkPhysicalDevicePCIBusInfoPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePCIBusInfoPropertiesEXT *>( this );
    }

    operator VkPhysicalDevicePCIBusInfoPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePCIBusInfoPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pciDomain, pciBus, pciDevice, pciFunction );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePCIBusInfoPropertiesEXT const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer value, not by chain contents.
    bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pciDomain == rhs.pciDomain ) && ( pciBus == rhs.pciBus ) && ( pciDevice == rhs.pciDevice ) &&
             ( pciFunction == rhs.pciFunction );
#  endif
    }

    bool operator!=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members in the same order as the C struct; sType is pre-set and should not be modified.
    StructureType sType       = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
    void *        pNext       = {};
    uint32_t      pciDomain   = {};
    uint32_t      pciBus      = {};
    uint32_t      pciDevice   = {};
    uint32_t      pciFunction = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C API type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDevicePCIBusInfoPropertiesEXT>
  {
    using Type = PhysicalDevicePCIBusInfoPropertiesEXT;
  };
#endif

  // Trait mapping the sType enumerant back to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePciBusInfoPropertiesEXT>
  {
    using Type = PhysicalDevicePCIBusInfoPropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT.html
  struct PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT
  {
    // C API struct this wrapper converts to/from via reinterpret_cast; member layout must match exactly.
    using NativeType = VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;

    // allowDuplicate: whether this struct may appear more than once in a structure chain.
    // structureType: the fixed sType value for this struct.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value-initializing constructor; sType is set via the member initializer and never passed in.
    VULKAN_HPP_CONSTEXPR PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( Bool32 pageableDeviceLocalMemory_ = {},
                                                                             void * pNext_                     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pageableDeviceLocalMemory{ pageableDeviceLocalMemory_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the (layout-identical) wrapper.
    PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( *reinterpret_cast<PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT &
      operator=( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; defined outside the constructor guard so it is always available.
    PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & operator=( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT &
      setPageableDeviceLocalMemory( Bool32 pageableDeviceLocalMemory_ ) VULKAN_HPP_NOEXCEPT
    {
      pageableDeviceLocalMemory = pageableDeviceLocalMemory_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (reference and pointer, const and non-const).
    operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT *>( this );
    }

    operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT *>( this );
    }

    operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT *>( this );
    }

    operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pageableDeviceLocalMemory );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer value, not by chain contents.
    bool operator==( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pageableDeviceLocalMemory == rhs.pageableDeviceLocalMemory );
#  endif
    }

    bool operator!=( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members in the same order as the C struct; sType is pre-set and should not be modified.
    StructureType sType                     = StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
    void *        pNext                     = {};
    Bool32        pageableDeviceLocalMemory = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C API type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT>
  {
    using Type = PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
  };
#endif

  // Trait mapping the sType enumerant back to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT>
  {
    using Type = PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV.html
  struct PhysicalDevicePartitionedAccelerationStructureFeaturesNV
  {
    // C API struct this wrapper converts to/from via reinterpret_cast; member layout must match exactly.
    using NativeType = VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV;

    // allowDuplicate: whether this struct may appear more than once in a structure chain.
    // structureType: the fixed sType value for this struct.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePartitionedAccelerationStructureFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value-initializing constructor; sType is set via the member initializer and never passed in.
    VULKAN_HPP_CONSTEXPR PhysicalDevicePartitionedAccelerationStructureFeaturesNV( Bool32 partitionedAccelerationStructure_ = {},
                                                                                   void * pNext_                            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , partitionedAccelerationStructure{ partitionedAccelerationStructure_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePartitionedAccelerationStructureFeaturesNV( PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the (layout-identical) wrapper.
    PhysicalDevicePartitionedAccelerationStructureFeaturesNV( VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePartitionedAccelerationStructureFeaturesNV( *reinterpret_cast<PhysicalDevicePartitionedAccelerationStructureFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDevicePartitionedAccelerationStructureFeaturesNV &
      operator=( PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; defined outside the constructor guard so it is always available.
    PhysicalDevicePartitionedAccelerationStructureFeaturesNV &
      operator=( VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePartitionedAccelerationStructureFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePartitionedAccelerationStructureFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePartitionedAccelerationStructureFeaturesNV &
      setPartitionedAccelerationStructure( Bool32 partitionedAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
    {
      partitionedAccelerationStructure = partitionedAccelerationStructure_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (reference and pointer, const and non-const).
    operator VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV *>( this );
    }

    operator VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV *>( this );
    }

    operator VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV *>( this );
    }

    operator VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, partitionedAccelerationStructure );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer value, not by chain contents.
    bool operator==( PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( partitionedAccelerationStructure == rhs.partitionedAccelerationStructure );
#  endif
    }

    bool operator!=( PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members in the same order as the C struct; sType is pre-set and should not be modified.
    StructureType sType                            = StructureType::ePhysicalDevicePartitionedAccelerationStructureFeaturesNV;
    void *        pNext                            = {};
    Bool32        partitionedAccelerationStructure = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C API type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV>
  {
    using Type = PhysicalDevicePartitionedAccelerationStructureFeaturesNV;
  };
#endif

  // Trait mapping the sType enumerant back to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePartitionedAccelerationStructureFeaturesNV>
  {
    using Type = PhysicalDevicePartitionedAccelerationStructureFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV.html
  // NOTE: no member setters are generated, matching the pattern used for returned-only
  // property structures (presumably filled in by the implementation — see the registry).
  struct PhysicalDevicePartitionedAccelerationStructurePropertiesNV
  {
    // C API struct this wrapper converts to/from via reinterpret_cast; member layout must match exactly.
    using NativeType = VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV;

    // allowDuplicate: whether this struct may appear more than once in a structure chain.
    // structureType: the fixed sType value for this struct.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePartitionedAccelerationStructurePropertiesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value-initializing constructor; sType is set via the member initializer and never passed in.
    VULKAN_HPP_CONSTEXPR PhysicalDevicePartitionedAccelerationStructurePropertiesNV( uint32_t maxPartitionCount_ = {},
                                                                                     void *   pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxPartitionCount{ maxPartitionCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePartitionedAccelerationStructurePropertiesNV( PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the (layout-identical) wrapper.
    PhysicalDevicePartitionedAccelerationStructurePropertiesNV( VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePartitionedAccelerationStructurePropertiesNV(
          *reinterpret_cast<PhysicalDevicePartitionedAccelerationStructurePropertiesNV const *>( &rhs ) )
    {
    }

    PhysicalDevicePartitionedAccelerationStructurePropertiesNV &
      operator=( PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; defined outside the constructor guard so it is always available.
    PhysicalDevicePartitionedAccelerationStructurePropertiesNV &
      operator=( VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePartitionedAccelerationStructurePropertiesNV const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C struct (reference and pointer, const and non-const).
    operator VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV *>( this );
    }

    operator VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV *>( this );
    }

    operator VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV *>( this );
    }

    operator VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of references, in declaration order.
    std::tuple<StructureType const &, void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxPartitionCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer value, not by chain contents.
    bool operator==( PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPartitionCount == rhs.maxPartitionCount );
#  endif
    }

    bool operator!=( PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members in the same order as the C struct; sType is pre-set and should not be modified.
    StructureType sType             = StructureType::ePhysicalDevicePartitionedAccelerationStructurePropertiesNV;
    void *        pNext             = {};
    uint32_t      maxPartitionCount = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C API type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV>
  {
    using Type = PhysicalDevicePartitionedAccelerationStructurePropertiesNV;
  };
#endif

  // Trait mapping the sType enumerant back to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePartitionedAccelerationStructurePropertiesNV>
  {
    using Type = PhysicalDevicePartitionedAccelerationStructurePropertiesNV;
  };

  // wrapper struct for struct VkPhysicalDevicePerStageDescriptorSetFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePerStageDescriptorSetFeaturesNV.html
  struct PhysicalDevicePerStageDescriptorSetFeaturesNV
  {
    using NativeType = VkPhysicalDevicePerStageDescriptorSetFeaturesNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePerStageDescriptorSetFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePerStageDescriptorSetFeaturesNV( Bool32 perStageDescriptorSet_ = {},
                                                                        Bool32 dynamicPipelineLayout_ = {},
                                                                        void * pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , perStageDescriptorSet{ perStageDescriptorSet_ }
      , dynamicPipelineLayout{ dynamicPipelineLayout_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDevicePerStageDescriptorSetFeaturesNV( PhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevicePerStageDescriptorSetFeaturesNV( VkPhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePerStageDescriptorSetFeaturesNV( *reinterpret_cast<PhysicalDevicePerStageDescriptorSetFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDevicePerStageDescriptorSetFeaturesNV & operator=( PhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDevicePerStageDescriptorSetFeaturesNV & operator=( VkPhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePerStageDescriptorSetFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerStageDescriptorSetFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerStageDescriptorSetFeaturesNV & setPerStageDescriptorSet( Bool32 perStageDescriptorSet_ ) VULKAN_HPP_NOEXCEPT
    {
      perStageDescriptorSet = perStageDescriptorSet_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerStageDescriptorSetFeaturesNV & setDynamicPipelineLayout( Bool32 dynamicPipelineLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      dynamicPipelineLayout = dynamicPipelineLayout_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPhysicalDevicePerStageDescriptorSetFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePerStageDescriptorSetFeaturesNV *>( this );
    }

    operator VkPhysicalDevicePerStageDescriptorSetFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePerStageDescriptorSetFeaturesNV *>( this );
    }

    operator VkPhysicalDevicePerStageDescriptorSetFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePerStageDescriptorSetFeaturesNV *>( this );
    }

    operator VkPhysicalDevicePerStageDescriptorSetFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePerStageDescriptorSetFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, perStageDescriptorSet, dynamicPipelineLayout );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePerStageDescriptorSetFeaturesNV const & ) const = default;
#else
    // Memberwise equality; note that sType and pNext participate in the comparison.
    bool operator==( PhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( perStageDescriptorSet == rhs.perStageDescriptorSet ) &&
             ( dynamicPipelineLayout == rhs.dynamicPipelineLayout );
#  endif
    }

    bool operator!=( PhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types must match VkPhysicalDevicePerStageDescriptorSetFeaturesNV
    // for the reinterpret_casts above to be valid.
    StructureType sType                 = StructureType::ePhysicalDevicePerStageDescriptorSetFeaturesNV;
    void *        pNext                 = {};
    Bool32        perStageDescriptorSet = {};
    Bool32        dynamicPipelineLayout = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkPhysicalDevicePerStageDescriptorSetFeaturesNV to its C++ wrapper (C++20 and newer).
  template <>
  struct CppType<VkPhysicalDevicePerStageDescriptorSetFeaturesNV>
  {
    using Type = PhysicalDevicePerStageDescriptorSetFeaturesNV;
  };
#endif

  // Maps StructureType::ePhysicalDevicePerStageDescriptorSetFeaturesNV to its C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePerStageDescriptorSetFeaturesNV>
  {
    using Type = PhysicalDevicePerStageDescriptorSetFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDevicePerformanceCountersByRegionFeaturesARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePerformanceCountersByRegionFeaturesARM.html
  struct PhysicalDevicePerformanceCountersByRegionFeaturesARM
  {
    using NativeType = VkPhysicalDevicePerformanceCountersByRegionFeaturesARM;

    // Compile-time identification of this structure type.
    // NOTE(review): allowDuplicate is presumably consumed by vulkan.hpp's StructureChain
    // validation, which is not visible in this chunk — confirm against vulkan.hpp.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePerformanceCountersByRegionFeaturesARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; pNext comes last so the feature flag can be set positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceCountersByRegionFeaturesARM( Bool32 performanceCountersByRegion_ = {},
                                                                               void * pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , performanceCountersByRegion{ performanceCountersByRegion_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDevicePerformanceCountersByRegionFeaturesARM( PhysicalDevicePerformanceCountersByRegionFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because the wrapper is reinterpret_cast-compatible with it.
    PhysicalDevicePerformanceCountersByRegionFeaturesARM( VkPhysicalDevicePerformanceCountersByRegionFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePerformanceCountersByRegionFeaturesARM( *reinterpret_cast<PhysicalDevicePerformanceCountersByRegionFeaturesARM const *>( &rhs ) )
    {
    }

    PhysicalDevicePerformanceCountersByRegionFeaturesARM &
      operator=( PhysicalDevicePerformanceCountersByRegionFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct by viewing it through the layout-compatible wrapper.
    PhysicalDevicePerformanceCountersByRegionFeaturesARM & operator=( VkPhysicalDevicePerformanceCountersByRegionFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePerformanceCountersByRegionFeaturesARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceCountersByRegionFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceCountersByRegionFeaturesARM &
      setPerformanceCountersByRegion( Bool32 performanceCountersByRegion_ ) VULKAN_HPP_NOEXCEPT
    {
      performanceCountersByRegion = performanceCountersByRegion_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference / pointer conversions to the underlying C struct.
    operator VkPhysicalDevicePerformanceCountersByRegionFeaturesARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePerformanceCountersByRegionFeaturesARM *>( this );
    }

    operator VkPhysicalDevicePerformanceCountersByRegionFeaturesARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePerformanceCountersByRegionFeaturesARM *>( this );
    }

    operator VkPhysicalDevicePerformanceCountersByRegionFeaturesARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePerformanceCountersByRegionFeaturesARM *>( this );
    }

    operator VkPhysicalDevicePerformanceCountersByRegionFeaturesARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePerformanceCountersByRegionFeaturesARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members; used by operator== below when reflection is enabled.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, performanceCountersByRegion );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePerformanceCountersByRegionFeaturesARM const & ) const = default;
#else
    // Memberwise equality; sType and pNext participate in the comparison.
    bool operator==( PhysicalDevicePerformanceCountersByRegionFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( performanceCountersByRegion == rhs.performanceCountersByRegion );
#  endif
    }

    bool operator!=( PhysicalDevicePerformanceCountersByRegionFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types must match VkPhysicalDevicePerformanceCountersByRegionFeaturesARM
    // for the reinterpret_casts above to be valid.
    StructureType sType                       = StructureType::ePhysicalDevicePerformanceCountersByRegionFeaturesARM;
    void *        pNext                       = {};
    Bool32        performanceCountersByRegion = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkPhysicalDevicePerformanceCountersByRegionFeaturesARM to its C++ wrapper (C++20 and newer).
  template <>
  struct CppType<VkPhysicalDevicePerformanceCountersByRegionFeaturesARM>
  {
    using Type = PhysicalDevicePerformanceCountersByRegionFeaturesARM;
  };
#endif

  // Maps StructureType::ePhysicalDevicePerformanceCountersByRegionFeaturesARM to its C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceCountersByRegionFeaturesARM>
  {
    using Type = PhysicalDevicePerformanceCountersByRegionFeaturesARM;
  };

  // wrapper struct for struct VkPhysicalDevicePerformanceCountersByRegionPropertiesARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePerformanceCountersByRegionPropertiesARM.html
  struct PhysicalDevicePerformanceCountersByRegionPropertiesARM
  {
    using NativeType = VkPhysicalDevicePerformanceCountersByRegionPropertiesARM;

    // Compile-time identification of this structure type.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePerformanceCountersByRegionPropertiesARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; pNext comes last so the property members can be set positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceCountersByRegionPropertiesARM( uint32_t maxPerRegionPerformanceCounters_ = {},
                                                                                 Extent2D performanceCounterRegionSize_    = {},
                                                                                 uint32_t rowStrideAlignment_              = {},
                                                                                 uint32_t regionAlignment_                 = {},
                                                                                 Bool32   identityTransformOrder_          = {},
                                                                                 void *   pNext_                           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxPerRegionPerformanceCounters{ maxPerRegionPerformanceCounters_ }
      , performanceCounterRegionSize{ performanceCounterRegionSize_ }
      , rowStrideAlignment{ rowStrideAlignment_ }
      , regionAlignment{ regionAlignment_ }
      , identityTransformOrder{ identityTransformOrder_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceCountersByRegionPropertiesARM( PhysicalDevicePerformanceCountersByRegionPropertiesARM const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because the wrapper is reinterpret_cast-compatible with it.
    PhysicalDevicePerformanceCountersByRegionPropertiesARM( VkPhysicalDevicePerformanceCountersByRegionPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePerformanceCountersByRegionPropertiesARM( *reinterpret_cast<PhysicalDevicePerformanceCountersByRegionPropertiesARM const *>( &rhs ) )
    {
    }

    PhysicalDevicePerformanceCountersByRegionPropertiesARM &
      operator=( PhysicalDevicePerformanceCountersByRegionPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct by viewing it through the layout-compatible wrapper.
    // NOTE(review): no setters are generated for this struct — presumably a read-only
    // properties structure filled in by queries; confirm against the registry.
    PhysicalDevicePerformanceCountersByRegionPropertiesARM &
      operator=( VkPhysicalDevicePerformanceCountersByRegionPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePerformanceCountersByRegionPropertiesARM const *>( &rhs );
      return *this;
    }

    // Implicit reference / pointer conversions to the underlying C struct.
    operator VkPhysicalDevicePerformanceCountersByRegionPropertiesARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePerformanceCountersByRegionPropertiesARM *>( this );
    }

    operator VkPhysicalDevicePerformanceCountersByRegionPropertiesARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePerformanceCountersByRegionPropertiesARM *>( this );
    }

    operator VkPhysicalDevicePerformanceCountersByRegionPropertiesARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePerformanceCountersByRegionPropertiesARM *>( this );
    }

    operator VkPhysicalDevicePerformanceCountersByRegionPropertiesARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePerformanceCountersByRegionPropertiesARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members; used by operator== below when reflection is enabled.
    std::tuple<StructureType const &, void * const &, uint32_t const &, Extent2D const &, uint32_t const &, uint32_t const &, Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(
        sType, pNext, maxPerRegionPerformanceCounters, performanceCounterRegionSize, rowStrideAlignment, regionAlignment, identityTransformOrder );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePerformanceCountersByRegionPropertiesARM const & ) const = default;
#else
    // Memberwise equality; sType and pNext participate in the comparison.
    bool operator==( PhysicalDevicePerformanceCountersByRegionPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPerRegionPerformanceCounters == rhs.maxPerRegionPerformanceCounters ) &&
             ( performanceCounterRegionSize == rhs.performanceCounterRegionSize ) && ( rowStrideAlignment == rhs.rowStrideAlignment ) &&
             ( regionAlignment == rhs.regionAlignment ) && ( identityTransformOrder == rhs.identityTransformOrder );
#  endif
    }

    bool operator!=( PhysicalDevicePerformanceCountersByRegionPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types must match VkPhysicalDevicePerformanceCountersByRegionPropertiesARM
    // for the reinterpret_casts above to be valid.
    StructureType sType                           = StructureType::ePhysicalDevicePerformanceCountersByRegionPropertiesARM;
    void *        pNext                           = {};
    uint32_t      maxPerRegionPerformanceCounters = {};
    Extent2D      performanceCounterRegionSize    = {};
    uint32_t      rowStrideAlignment              = {};
    uint32_t      regionAlignment                 = {};
    Bool32        identityTransformOrder          = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkPhysicalDevicePerformanceCountersByRegionPropertiesARM to its C++ wrapper (C++20 and newer).
  template <>
  struct CppType<VkPhysicalDevicePerformanceCountersByRegionPropertiesARM>
  {
    using Type = PhysicalDevicePerformanceCountersByRegionPropertiesARM;
  };
#endif

  // Maps StructureType::ePhysicalDevicePerformanceCountersByRegionPropertiesARM to its C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceCountersByRegionPropertiesARM>
  {
    using Type = PhysicalDevicePerformanceCountersByRegionPropertiesARM;
  };

  // wrapper struct for struct VkPhysicalDevicePerformanceQueryFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePerformanceQueryFeaturesKHR.html
  struct PhysicalDevicePerformanceQueryFeaturesKHR
  {
    using NativeType = VkPhysicalDevicePerformanceQueryFeaturesKHR;

    // Compile-time identification of this structure type.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; pNext comes last so the feature flags can be set positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( Bool32 performanceCounterQueryPools_         = {},
                                                                    Bool32 performanceCounterMultipleQueryPools_ = {},
                                                                    void * pNext_                                = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , performanceCounterQueryPools{ performanceCounterQueryPools_ }
      , performanceCounterMultipleQueryPools{ performanceCounterMultipleQueryPools_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because the wrapper is reinterpret_cast-compatible with it.
    PhysicalDevicePerformanceQueryFeaturesKHR( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePerformanceQueryFeaturesKHR( *reinterpret_cast<PhysicalDevicePerformanceQueryFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDevicePerformanceQueryFeaturesKHR & operator=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct by viewing it through the layout-compatible wrapper.
    PhysicalDevicePerformanceQueryFeaturesKHR & operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePerformanceQueryFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR &
      setPerformanceCounterQueryPools( Bool32 performanceCounterQueryPools_ ) VULKAN_HPP_NOEXCEPT
    {
      performanceCounterQueryPools = performanceCounterQueryPools_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR &
      setPerformanceCounterMultipleQueryPools( Bool32 performanceCounterMultipleQueryPools_ ) VULKAN_HPP_NOEXCEPT
    {
      performanceCounterMultipleQueryPools = performanceCounterMultipleQueryPools_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference / pointer conversions to the underlying C struct.
    operator VkPhysicalDevicePerformanceQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryFeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePerformanceQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePerformanceQueryFeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePerformanceQueryFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePerformanceQueryFeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePerformanceQueryFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePerformanceQueryFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members; used by operator== below when reflection is enabled.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, performanceCounterQueryPools, performanceCounterMultipleQueryPools );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePerformanceQueryFeaturesKHR const & ) const = default;
#else
    // Memberwise equality; sType and pNext participate in the comparison.
    bool operator==( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( performanceCounterQueryPools == rhs.performanceCounterQueryPools ) &&
             ( performanceCounterMultipleQueryPools == rhs.performanceCounterMultipleQueryPools );
#  endif
    }

    bool operator!=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types must match VkPhysicalDevicePerformanceQueryFeaturesKHR
    // for the reinterpret_casts above to be valid.
    StructureType sType                                = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
    void *        pNext                                = {};
    Bool32        performanceCounterQueryPools         = {};
    Bool32        performanceCounterMultipleQueryPools = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkPhysicalDevicePerformanceQueryFeaturesKHR to its C++ wrapper (C++20 and newer).
  template <>
  struct CppType<VkPhysicalDevicePerformanceQueryFeaturesKHR>
  {
    using Type = PhysicalDevicePerformanceQueryFeaturesKHR;
  };
#endif

  // Maps StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR to its C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR>
  {
    using Type = PhysicalDevicePerformanceQueryFeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDevicePerformanceQueryPropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePerformanceQueryPropertiesKHR.html
  struct PhysicalDevicePerformanceQueryPropertiesKHR
  {
    using NativeType = VkPhysicalDevicePerformanceQueryPropertiesKHR;

    // Compile-time identification of this structure type.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; the pNext chain pointer comes last so the property can be set positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( Bool32 allowCommandBufferQueryCopies_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ ), allowCommandBufferQueryCopies( allowCommandBufferQueryCopies_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; the wrapper is reinterpret_cast-compatible with it.
    PhysicalDevicePerformanceQueryPropertiesKHR( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePerformanceQueryPropertiesKHR( *reinterpret_cast<PhysicalDevicePerformanceQueryPropertiesKHR const *>( &rhs ) )
    {
    }

    PhysicalDevicePerformanceQueryPropertiesKHR & operator=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct by viewing it through the layout-compatible wrapper.
    PhysicalDevicePerformanceQueryPropertiesKHR & operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      return *this = *reinterpret_cast<PhysicalDevicePerformanceQueryPropertiesKHR const *>( &rhs );
    }

    // Implicit reference conversions to the underlying C struct.
    operator VkPhysicalDevicePerformanceQueryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryPropertiesKHR *>( this );
    }

    operator VkPhysicalDevicePerformanceQueryPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePerformanceQueryPropertiesKHR *>( this );
    }

    // Implicit pointer conversions to the underlying C struct.
    operator VkPhysicalDevicePerformanceQueryPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePerformanceQueryPropertiesKHR *>( this );
    }

    operator VkPhysicalDevicePerformanceQueryPropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePerformanceQueryPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members; drives the reflective comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, allowCommandBufferQueryCopies );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePerformanceQueryPropertiesKHR const & ) const = default;
#else
    // Memberwise equality; sType and pNext participate in the comparison.
    bool operator==( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allowCommandBufferQueryCopies == rhs.allowCommandBufferQueryCopies );
#  endif
    }

    bool operator!=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDevicePerformanceQueryPropertiesKHR; order and types must not change,
    // or the reinterpret_casts above become invalid.
    StructureType sType                         = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;
    void *        pNext                         = {};
    Bool32        allowCommandBufferQueryCopies = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkPhysicalDevicePerformanceQueryPropertiesKHR to its C++ wrapper (C++20 and newer).
  template <>
  struct CppType<VkPhysicalDevicePerformanceQueryPropertiesKHR>
  {
    using Type = PhysicalDevicePerformanceQueryPropertiesKHR;
  };
#endif

  // Maps StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR to its C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR>
  {
    using Type = PhysicalDevicePerformanceQueryPropertiesKHR;
  };

  // wrapper struct for struct VkPhysicalDevicePipelineBinaryFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineBinaryFeaturesKHR.html
  struct PhysicalDevicePipelineBinaryFeaturesKHR
  {
    using NativeType = VkPhysicalDevicePipelineBinaryFeaturesKHR;

    // Compile-time identification of this structure type.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePipelineBinaryFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; pNext comes last so the feature flag can be set positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineBinaryFeaturesKHR( Bool32 pipelineBinaries_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pipelineBinaries{ pipelineBinaries_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineBinaryFeaturesKHR( PhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because the wrapper is reinterpret_cast-compatible with it.
    PhysicalDevicePipelineBinaryFeaturesKHR( VkPhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePipelineBinaryFeaturesKHR( *reinterpret_cast<PhysicalDevicePipelineBinaryFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDevicePipelineBinaryFeaturesKHR & operator=( PhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct by viewing it through the layout-compatible wrapper.
    PhysicalDevicePipelineBinaryFeaturesKHR & operator=( VkPhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePipelineBinaryFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryFeaturesKHR & setPipelineBinaries( Bool32 pipelineBinaries_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineBinaries = pipelineBinaries_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference / pointer conversions to the underlying C struct.
    operator VkPhysicalDevicePipelineBinaryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePipelineBinaryFeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePipelineBinaryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePipelineBinaryFeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePipelineBinaryFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePipelineBinaryFeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePipelineBinaryFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePipelineBinaryFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members; used by operator== below when reflection is enabled.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pipelineBinaries );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePipelineBinaryFeaturesKHR const & ) const = default;
#else
    // Memberwise equality; sType and pNext participate in the comparison.
    bool operator==( PhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBinaries == rhs.pipelineBinaries );
#  endif
    }

    bool operator!=( PhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types must match VkPhysicalDevicePipelineBinaryFeaturesKHR
    // for the reinterpret_casts above to be valid.
    StructureType sType            = StructureType::ePhysicalDevicePipelineBinaryFeaturesKHR;
    void *        pNext            = {};
    Bool32        pipelineBinaries = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkPhysicalDevicePipelineBinaryFeaturesKHR to its C++ wrapper (C++20 and newer).
  template <>
  struct CppType<VkPhysicalDevicePipelineBinaryFeaturesKHR>
  {
    using Type = PhysicalDevicePipelineBinaryFeaturesKHR;
  };
#endif

  // Maps StructureType::ePhysicalDevicePipelineBinaryFeaturesKHR to its C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineBinaryFeaturesKHR>
  {
    using Type = PhysicalDevicePipelineBinaryFeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDevicePipelineBinaryPropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineBinaryPropertiesKHR.html
  struct PhysicalDevicePipelineBinaryPropertiesKHR
  {
    using NativeType = VkPhysicalDevicePipelineBinaryPropertiesKHR;

    // Compile-time identification of this structure type.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePipelineBinaryPropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; pNext comes last so the property members can be set positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineBinaryPropertiesKHR( Bool32 pipelineBinaryInternalCache_            = {},
                                                                    Bool32 pipelineBinaryInternalCacheControl_     = {},
                                                                    Bool32 pipelineBinaryPrefersInternalCache_     = {},
                                                                    Bool32 pipelineBinaryPrecompiledInternalCache_ = {},
                                                                    Bool32 pipelineBinaryCompressedData_           = {},
                                                                    void * pNext_                                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pipelineBinaryInternalCache{ pipelineBinaryInternalCache_ }
      , pipelineBinaryInternalCacheControl{ pipelineBinaryInternalCacheControl_ }
      , pipelineBinaryPrefersInternalCache{ pipelineBinaryPrefersInternalCache_ }
      , pipelineBinaryPrecompiledInternalCache{ pipelineBinaryPrecompiledInternalCache_ }
      , pipelineBinaryCompressedData{ pipelineBinaryCompressedData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineBinaryPropertiesKHR( PhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because the wrapper is reinterpret_cast-compatible with it.
    PhysicalDevicePipelineBinaryPropertiesKHR( VkPhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePipelineBinaryPropertiesKHR( *reinterpret_cast<PhysicalDevicePipelineBinaryPropertiesKHR const *>( &rhs ) )
    {
    }

    PhysicalDevicePipelineBinaryPropertiesKHR & operator=( PhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct by viewing it through the layout-compatible wrapper.
    // NOTE(review): no setters are generated for this struct — presumably a read-only
    // properties structure filled in by queries; confirm against the registry.
    PhysicalDevicePipelineBinaryPropertiesKHR & operator=( VkPhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePipelineBinaryPropertiesKHR const *>( &rhs );
      return *this;
    }

    // Implicit reference / pointer conversions to the underlying C struct.
    operator VkPhysicalDevicePipelineBinaryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePipelineBinaryPropertiesKHR *>( this );
    }

    operator VkPhysicalDevicePipelineBinaryPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePipelineBinaryPropertiesKHR *>( this );
    }

    operator VkPhysicalDevicePipelineBinaryPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePipelineBinaryPropertiesKHR *>( this );
    }

    operator VkPhysicalDevicePipelineBinaryPropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePipelineBinaryPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members; used by operator== below when reflection is enabled.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       pipelineBinaryInternalCache,
                       pipelineBinaryInternalCacheControl,
                       pipelineBinaryPrefersInternalCache,
                       pipelineBinaryPrecompiledInternalCache,
                       pipelineBinaryCompressedData );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePipelineBinaryPropertiesKHR const & ) const = default;
#else
    // Memberwise equality; sType and pNext participate in the comparison.
    bool operator==( PhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBinaryInternalCache == rhs.pipelineBinaryInternalCache ) &&
             ( pipelineBinaryInternalCacheControl == rhs.pipelineBinaryInternalCacheControl ) &&
             ( pipelineBinaryPrefersInternalCache == rhs.pipelineBinaryPrefersInternalCache ) &&
             ( pipelineBinaryPrecompiledInternalCache == rhs.pipelineBinaryPrecompiledInternalCache ) &&
             ( pipelineBinaryCompressedData == rhs.pipelineBinaryCompressedData );
#  endif
    }

    bool operator!=( PhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types must match VkPhysicalDevicePipelineBinaryPropertiesKHR
    // for the reinterpret_casts above to be valid.
    StructureType sType                                  = StructureType::ePhysicalDevicePipelineBinaryPropertiesKHR;
    void *        pNext                                  = {};
    Bool32        pipelineBinaryInternalCache            = {};
    Bool32        pipelineBinaryInternalCacheControl     = {};
    Bool32        pipelineBinaryPrefersInternalCache     = {};
    Bool32        pipelineBinaryPrecompiledInternalCache = {};
    Bool32        pipelineBinaryCompressedData           = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkPhysicalDevicePipelineBinaryPropertiesKHR to its C++ wrapper (C++20 and newer).
  template <>
  struct CppType<VkPhysicalDevicePipelineBinaryPropertiesKHR>
  {
    using Type = PhysicalDevicePipelineBinaryPropertiesKHR;
  };
#endif

  // Maps StructureType::ePhysicalDevicePipelineBinaryPropertiesKHR to its C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineBinaryPropertiesKHR>
  {
    using Type = PhysicalDevicePipelineBinaryPropertiesKHR;
  };

  // wrapper struct for struct VkPhysicalDevicePipelineCacheIncrementalModeFeaturesSEC, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineCacheIncrementalModeFeaturesSEC.html
  struct PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC
  {
    using NativeType = VkPhysicalDevicePipelineCacheIncrementalModeFeaturesSEC;

    // Compile-time identification of this structure type.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePipelineCacheIncrementalModeFeaturesSEC;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC( Bool32 pipelineCacheIncrementalMode_ = {},
                                                                                void * pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pipelineCacheIncrementalMode{ pipelineCacheIncrementalMode_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC( PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC( VkPhysicalDevicePipelineCacheIncrementalModeFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC( *reinterpret_cast<PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC const *>( &rhs ) )
    {
    }

    PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC &
      operator=( PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC & operator=( VkPhysicalDevicePipelineCacheIncrementalModeFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC &
      setPipelineCacheIncrementalMode( Bool32 pipelineCacheIncrementalMode_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineCacheIncrementalMode = pipelineCacheIncrementalMode_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPhysicalDevicePipelineCacheIncrementalModeFeaturesSEC const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePipelineCacheIncrementalModeFeaturesSEC *>( this );
    }

    operator VkPhysicalDevicePipelineCacheIncrementalModeFeaturesSEC &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePipelineCacheIncrementalModeFeaturesSEC *>( this );
    }

    operator VkPhysicalDevicePipelineCacheIncrementalModeFeaturesSEC const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePipelineCacheIncrementalModeFeaturesSEC *>( this );
    }

    operator VkPhysicalDevicePipelineCacheIncrementalModeFeaturesSEC *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePipelineCacheIncrementalModeFeaturesSEC *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pipelineCacheIncrementalMode );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC const & ) const = default;
#else
    bool operator==( PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineCacheIncrementalMode == rhs.pipelineCacheIncrementalMode );
#  endif
    }

    bool operator!=( PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                        = StructureType::ePhysicalDevicePipelineCacheIncrementalModeFeaturesSEC;
    void *        pNext                        = {};
    Bool32        pipelineCacheIncrementalMode = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization mapping the C struct type to its C++ wrapper (enabled for C++20 and newer).
  template <>
  struct CppType<VkPhysicalDevicePipelineCacheIncrementalModeFeaturesSEC>
  {
    using Type = PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC;
  };
#endif

  // Trait specialization mapping the StructureType enumerant to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineCacheIncrementalModeFeaturesSEC>
  {
    using Type = PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC;
  };

  // wrapper struct for struct VkPhysicalDevicePipelineCreationCacheControlFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineCreationCacheControlFeatures.html
  struct PhysicalDevicePipelineCreationCacheControlFeatures
  {
    using NativeType = VkPhysicalDevicePipelineCreationCacheControlFeatures;

    // Structure-chain metadata: this struct's sType enumerant, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeatures( Bool32 pipelineCreationCacheControl_ = {},
                                                                             void * pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pipelineCreationCacheControl{ pipelineCreationCacheControl_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDevicePipelineCreationCacheControlFeatures( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on this wrapper being layout-identical to VkPhysicalDevicePipelineCreationCacheControlFeatures.
    PhysicalDevicePipelineCreationCacheControlFeatures( VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePipelineCreationCacheControlFeatures( *reinterpret_cast<PhysicalDevicePipelineCreationCacheControlFeatures const *>( &rhs ) )
    {
    }

    PhysicalDevicePipelineCreationCacheControlFeatures &
      operator=( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout assumption as the converting constructor).
    PhysicalDevicePipelineCreationCacheControlFeatures & operator=( VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePipelineCreationCacheControlFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures &
      setPipelineCreationCacheControl( Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineCreationCacheControl = pipelineCreationCacheControl_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, for passing this wrapper straight to the C API.
    operator VkPhysicalDevicePipelineCreationCacheControlFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePipelineCreationCacheControlFeatures *>( this );
    }

    operator VkPhysicalDevicePipelineCreationCacheControlFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePipelineCreationCacheControlFeatures *>( this );
    }

    operator VkPhysicalDevicePipelineCreationCacheControlFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePipelineCreationCacheControlFeatures *>( this );
    }

    operator VkPhysicalDevicePipelineCreationCacheControlFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePipelineCreationCacheControlFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members; used for generic member-wise comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pipelineCreationCacheControl );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePipelineCreationCacheControlFeatures const & ) const = default;
#else
    // Member-wise equality; note pNext is compared as a raw pointer value, not by chain contents.
    bool operator==( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl );
#  endif
    }

    bool operator!=( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct's layout exactly (order and types must not change; required by the reinterpret_casts above).
    StructureType sType                        = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures;
    void *        pNext                        = {};
    Bool32        pipelineCreationCacheControl = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization mapping the C struct type to its C++ wrapper (enabled for C++20 and newer).
  template <>
  struct CppType<VkPhysicalDevicePipelineCreationCacheControlFeatures>
  {
    using Type = PhysicalDevicePipelineCreationCacheControlFeatures;
  };
#endif

  // Trait specialization mapping the StructureType enumerant to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures>
  {
    using Type = PhysicalDevicePipelineCreationCacheControlFeatures;
  };

  // EXT-suffixed alias for the core struct, kept for source compatibility.
  using PhysicalDevicePipelineCreationCacheControlFeaturesEXT = PhysicalDevicePipelineCreationCacheControlFeatures;

  // wrapper struct for struct VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR.html
  struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR
  {
    using NativeType = VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR;

    // Structure-chain metadata: this struct's sType enumerant, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( Bool32 pipelineExecutableInfo_ = {},
                                                                                void * pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pipelineExecutableInfo{ pipelineExecutableInfo_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on this wrapper being layout-identical to VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR.
    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( *reinterpret_cast<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR &
      operator=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout assumption as the converting constructor).
    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR &
      setPipelineExecutableInfo( Bool32 pipelineExecutableInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineExecutableInfo = pipelineExecutableInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, for passing this wrapper straight to the C API.
    operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members; used for generic member-wise comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pipelineExecutableInfo );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & ) const = default;
#else
    // Member-wise equality; note pNext is compared as a raw pointer value, not by chain contents.
    bool operator==( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineExecutableInfo == rhs.pipelineExecutableInfo );
#  endif
    }

    bool operator!=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct's layout exactly (order and types must not change; required by the reinterpret_casts above).
    StructureType sType                  = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
    void *        pNext                  = {};
    Bool32        pipelineExecutableInfo = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization mapping the C struct type to its C++ wrapper (enabled for C++20 and newer).
  template <>
  struct CppType<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>
  {
    using Type = PhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
  };
#endif

  // Trait specialization mapping the StructureType enumerant to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR>
  {
    using Type = PhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT.html
  struct PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT
  {
    using NativeType = VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT;

    // Structure-chain metadata: this struct's sType enumerant, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT( Bool32 pipelineLibraryGroupHandles_ = {},
                                                                               void * pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pipelineLibraryGroupHandles{ pipelineLibraryGroupHandles_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on this wrapper being layout-identical to VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT.
    PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT( VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT( *reinterpret_cast<PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT &
      operator=( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout assumption as the converting constructor).
    PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT & operator=( VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT &
      setPipelineLibraryGroupHandles( Bool32 pipelineLibraryGroupHandles_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineLibraryGroupHandles = pipelineLibraryGroupHandles_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, for passing this wrapper straight to the C API.
    operator VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT *>( this );
    }

    operator VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT *>( this );
    }

    operator VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT *>( this );
    }

    operator VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members; used for generic member-wise comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pipelineLibraryGroupHandles );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & ) const = default;
#else
    // Member-wise equality; note pNext is compared as a raw pointer value, not by chain contents.
    bool operator==( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineLibraryGroupHandles == rhs.pipelineLibraryGroupHandles );
#  endif
    }

    bool operator!=( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct's layout exactly (order and types must not change; required by the reinterpret_casts above).
    StructureType sType                       = StructureType::ePhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT;
    void *        pNext                       = {};
    Bool32        pipelineLibraryGroupHandles = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization mapping the C struct type to its C++ wrapper (enabled for C++20 and newer).
  template <>
  struct CppType<VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT>
  {
    using Type = PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT;
  };
#endif

  // Trait specialization mapping the StructureType enumerant to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT>
  {
    using Type = PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDevicePipelineOpacityMicromapFeaturesARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineOpacityMicromapFeaturesARM.html
  struct PhysicalDevicePipelineOpacityMicromapFeaturesARM
  {
    using NativeType = VkPhysicalDevicePipelineOpacityMicromapFeaturesARM;

    // Structure-chain metadata: this struct's sType enumerant, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePipelineOpacityMicromapFeaturesARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineOpacityMicromapFeaturesARM( Bool32 pipelineOpacityMicromap_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pipelineOpacityMicromap{ pipelineOpacityMicromap_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDevicePipelineOpacityMicromapFeaturesARM( PhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on this wrapper being layout-identical to VkPhysicalDevicePipelineOpacityMicromapFeaturesARM.
    PhysicalDevicePipelineOpacityMicromapFeaturesARM( VkPhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePipelineOpacityMicromapFeaturesARM( *reinterpret_cast<PhysicalDevicePipelineOpacityMicromapFeaturesARM const *>( &rhs ) )
    {
    }

    PhysicalDevicePipelineOpacityMicromapFeaturesARM & operator=( PhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout assumption as the converting constructor).
    PhysicalDevicePipelineOpacityMicromapFeaturesARM & operator=( VkPhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePipelineOpacityMicromapFeaturesARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineOpacityMicromapFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineOpacityMicromapFeaturesARM & setPipelineOpacityMicromap( Bool32 pipelineOpacityMicromap_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineOpacityMicromap = pipelineOpacityMicromap_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, for passing this wrapper straight to the C API.
    operator VkPhysicalDevicePipelineOpacityMicromapFeaturesARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePipelineOpacityMicromapFeaturesARM *>( this );
    }

    operator VkPhysicalDevicePipelineOpacityMicromapFeaturesARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePipelineOpacityMicromapFeaturesARM *>( this );
    }

    operator VkPhysicalDevicePipelineOpacityMicromapFeaturesARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePipelineOpacityMicromapFeaturesARM *>( this );
    }

    operator VkPhysicalDevicePipelineOpacityMicromapFeaturesARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePipelineOpacityMicromapFeaturesARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members; used for generic member-wise comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pipelineOpacityMicromap );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePipelineOpacityMicromapFeaturesARM const & ) const = default;
#else
    // Member-wise equality; note pNext is compared as a raw pointer value, not by chain contents.
    bool operator==( PhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineOpacityMicromap == rhs.pipelineOpacityMicromap );
#  endif
    }

    bool operator!=( PhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct's layout exactly (order and types must not change; required by the reinterpret_casts above).
    StructureType sType                   = StructureType::ePhysicalDevicePipelineOpacityMicromapFeaturesARM;
    void *        pNext                   = {};
    Bool32        pipelineOpacityMicromap = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization mapping the C struct type to its C++ wrapper (enabled for C++20 and newer).
  template <>
  struct CppType<VkPhysicalDevicePipelineOpacityMicromapFeaturesARM>
  {
    using Type = PhysicalDevicePipelineOpacityMicromapFeaturesARM;
  };
#endif

  // Trait specialization mapping the StructureType enumerant to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineOpacityMicromapFeaturesARM>
  {
    using Type = PhysicalDevicePipelineOpacityMicromapFeaturesARM;
  };

  // wrapper struct for struct VkPhysicalDevicePipelinePropertiesFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelinePropertiesFeaturesEXT.html
  struct PhysicalDevicePipelinePropertiesFeaturesEXT
  {
    using NativeType = VkPhysicalDevicePipelinePropertiesFeaturesEXT;

    // Structure-chain metadata: this struct's sType enumerant, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelinePropertiesFeaturesEXT( Bool32 pipelinePropertiesIdentifier_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pipelinePropertiesIdentifier{ pipelinePropertiesIdentifier_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelinePropertiesFeaturesEXT( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on this wrapper being layout-identical to VkPhysicalDevicePipelinePropertiesFeaturesEXT.
    PhysicalDevicePipelinePropertiesFeaturesEXT( VkPhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePipelinePropertiesFeaturesEXT( *reinterpret_cast<PhysicalDevicePipelinePropertiesFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDevicePipelinePropertiesFeaturesEXT & operator=( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout assumption as the converting constructor).
    PhysicalDevicePipelinePropertiesFeaturesEXT & operator=( VkPhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePipelinePropertiesFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelinePropertiesFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelinePropertiesFeaturesEXT &
      setPipelinePropertiesIdentifier( Bool32 pipelinePropertiesIdentifier_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelinePropertiesIdentifier = pipelinePropertiesIdentifier_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, for passing this wrapper straight to the C API.
    operator VkPhysicalDevicePipelinePropertiesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePipelinePropertiesFeaturesEXT *>( this );
    }

    operator VkPhysicalDevicePipelinePropertiesFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePipelinePropertiesFeaturesEXT *>( this );
    }

    operator VkPhysicalDevicePipelinePropertiesFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePipelinePropertiesFeaturesEXT *>( this );
    }

    operator VkPhysicalDevicePipelinePropertiesFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePipelinePropertiesFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members; used for generic member-wise comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pipelinePropertiesIdentifier );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePipelinePropertiesFeaturesEXT const & ) const = default;
#else
    // Member-wise equality; note pNext is compared as a raw pointer value, not by chain contents.
    bool operator==( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelinePropertiesIdentifier == rhs.pipelinePropertiesIdentifier );
#  endif
    }

    bool operator!=( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct's layout exactly (order and types must not change; required by the reinterpret_casts above).
    StructureType sType                        = StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT;
    void *        pNext                        = {};
    Bool32        pipelinePropertiesIdentifier = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization mapping the C struct type to its C++ wrapper (enabled for C++20 and newer).
  template <>
  struct CppType<VkPhysicalDevicePipelinePropertiesFeaturesEXT>
  {
    using Type = PhysicalDevicePipelinePropertiesFeaturesEXT;
  };
#endif

  // Trait specialization mapping the StructureType enumerant to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT>
  {
    using Type = PhysicalDevicePipelinePropertiesFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDevicePipelineProtectedAccessFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineProtectedAccessFeatures.html
  struct PhysicalDevicePipelineProtectedAccessFeatures
  {
    using NativeType = VkPhysicalDevicePipelineProtectedAccessFeatures;

    // Structure-chain metadata: this struct's sType enumerant, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePipelineProtectedAccessFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineProtectedAccessFeatures( Bool32 pipelineProtectedAccess_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pipelineProtectedAccess{ pipelineProtectedAccess_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDevicePipelineProtectedAccessFeatures( PhysicalDevicePipelineProtectedAccessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on this wrapper being layout-identical to VkPhysicalDevicePipelineProtectedAccessFeatures.
    PhysicalDevicePipelineProtectedAccessFeatures( VkPhysicalDevicePipelineProtectedAccessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePipelineProtectedAccessFeatures( *reinterpret_cast<PhysicalDevicePipelineProtectedAccessFeatures const *>( &rhs ) )
    {
    }

    PhysicalDevicePipelineProtectedAccessFeatures & operator=( PhysicalDevicePipelineProtectedAccessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout assumption as the converting constructor).
    PhysicalDevicePipelineProtectedAccessFeatures & operator=( VkPhysicalDevicePipelineProtectedAccessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePipelineProtectedAccessFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineProtectedAccessFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineProtectedAccessFeatures & setPipelineProtectedAccess( Bool32 pipelineProtectedAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineProtectedAccess = pipelineProtectedAccess_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, for passing this wrapper straight to the C API.
    operator VkPhysicalDevicePipelineProtectedAccessFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePipelineProtectedAccessFeatures *>( this );
    }

    operator VkPhysicalDevicePipelineProtectedAccessFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePipelineProtectedAccessFeatures *>( this );
    }

    operator VkPhysicalDevicePipelineProtectedAccessFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePipelineProtectedAccessFeatures *>( this );
    }

    operator VkPhysicalDevicePipelineProtectedAccessFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePipelineProtectedAccessFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members; used for generic member-wise comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pipelineProtectedAccess );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePipelineProtectedAccessFeatures const & ) const = default;
#else
    // Member-wise equality; note pNext is compared as a raw pointer value, not by chain contents.
    bool operator==( PhysicalDevicePipelineProtectedAccessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineProtectedAccess == rhs.pipelineProtectedAccess );
#  endif
    }

    bool operator!=( PhysicalDevicePipelineProtectedAccessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct's layout exactly (order and types must not change; required by the reinterpret_casts above).
    StructureType sType                   = StructureType::ePhysicalDevicePipelineProtectedAccessFeatures;
    void *        pNext                   = {};
    Bool32        pipelineProtectedAccess = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkPhysicalDevicePipelineProtectedAccessFeatures>
  {
    using Type = PhysicalDevicePipelineProtectedAccessFeatures;
  };
#endif

  // Associates the StructureType enumerant with the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineProtectedAccessFeatures>
  {
    using Type = PhysicalDevicePipelineProtectedAccessFeatures;
  };

  // Alias retained for code written against the EXT-suffixed extension name.
  using PhysicalDevicePipelineProtectedAccessFeaturesEXT = PhysicalDevicePipelineProtectedAccessFeatures;

  // wrapper struct for struct VkPhysicalDevicePipelineRobustnessFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineRobustnessFeatures.html
  //
  // Layout-compatible with the native C struct (see NativeType); the
  // reinterpret_cast-based conversions below rely on that compatibility.
  struct PhysicalDevicePipelineRobustnessFeatures
  {
    using NativeType = VkPhysicalDevicePipelineRobustnessFeatures;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePipelineRobustnessFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer below and is not a parameter.
    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessFeatures( Bool32 pipelineRobustness_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pipelineRobustness{ pipelineRobustness_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessFeatures( PhysicalDevicePipelineRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by delegating to the copy constructor via a layout-compatible cast.
    PhysicalDevicePipelineRobustnessFeatures( VkPhysicalDevicePipelineRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePipelineRobustnessFeatures( *reinterpret_cast<PhysicalDevicePipelineRobustnessFeatures const *>( &rhs ) )
    {
    }

    PhysicalDevicePipelineRobustnessFeatures & operator=( PhysicalDevicePipelineRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; valid because the wrapper mirrors its layout.
    PhysicalDevicePipelineRobustnessFeatures & operator=( VkPhysicalDevicePipelineRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePipelineRobustnessFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters returning *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineRobustnessFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineRobustnessFeatures & setPipelineRobustness( Bool32 pipelineRobustness_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineRobustness = pipelineRobustness_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native Vulkan type, by reference and by pointer.
    operator VkPhysicalDevicePipelineRobustnessFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePipelineRobustnessFeatures *>( this );
    }

    operator VkPhysicalDevicePipelineRobustnessFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePipelineRobustnessFeatures *>( this );
    }

    operator VkPhysicalDevicePipelineRobustnessFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePipelineRobustnessFeatures *>( this );
    }

    operator VkPhysicalDevicePipelineRobustnessFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePipelineRobustnessFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used by the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pipelineRobustness );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePipelineRobustnessFeatures const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer, not deep-compared.
    bool operator==( PhysicalDevicePipelineRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineRobustness == rhs.pipelineRobustness );
#  endif
    }

    bool operator!=( PhysicalDevicePipelineRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPhysicalDevicePipelineRobustnessFeatures exactly.
    StructureType sType              = StructureType::ePhysicalDevicePipelineRobustnessFeatures;
    void *        pNext              = {};
    Bool32        pipelineRobustness = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkPhysicalDevicePipelineRobustnessFeatures>
  {
    using Type = PhysicalDevicePipelineRobustnessFeatures;
  };
#endif

  // Associates the StructureType enumerant with the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineRobustnessFeatures>
  {
    using Type = PhysicalDevicePipelineRobustnessFeatures;
  };

  // Alias retained for code written against the EXT-suffixed extension name.
  using PhysicalDevicePipelineRobustnessFeaturesEXT = PhysicalDevicePipelineRobustnessFeatures;

  // wrapper struct for struct VkPhysicalDevicePipelineRobustnessProperties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineRobustnessProperties.html
  //
  // Properties struct: no setters are generated (unlike the Features wrappers
  // in this file), only constructors, conversions, reflection and comparison.
  struct PhysicalDevicePipelineRobustnessProperties
  {
    using NativeType = VkPhysicalDevicePipelineRobustnessProperties;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePipelineRobustnessProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all behaviors default to eDeviceDefault, matching the member initializers.
    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessProperties(
      PipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers_ = PipelineRobustnessBufferBehavior::eDeviceDefault,
      PipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers_ = PipelineRobustnessBufferBehavior::eDeviceDefault,
      PipelineRobustnessBufferBehavior defaultRobustnessVertexInputs_   = PipelineRobustnessBufferBehavior::eDeviceDefault,
      PipelineRobustnessImageBehavior  defaultRobustnessImages_         = PipelineRobustnessImageBehavior::eDeviceDefault,
      void *                           pNext_                           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , defaultRobustnessStorageBuffers{ defaultRobustnessStorageBuffers_ }
      , defaultRobustnessUniformBuffers{ defaultRobustnessUniformBuffers_ }
      , defaultRobustnessVertexInputs{ defaultRobustnessVertexInputs_ }
      , defaultRobustnessImages{ defaultRobustnessImages_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessProperties( PhysicalDevicePipelineRobustnessProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct via a layout-compatible cast.
    PhysicalDevicePipelineRobustnessProperties( VkPhysicalDevicePipelineRobustnessProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePipelineRobustnessProperties( *reinterpret_cast<PhysicalDevicePipelineRobustnessProperties const *>( &rhs ) )
    {
    }

    PhysicalDevicePipelineRobustnessProperties & operator=( PhysicalDevicePipelineRobustnessProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; valid because the wrapper mirrors its layout.
    PhysicalDevicePipelineRobustnessProperties & operator=( VkPhysicalDevicePipelineRobustnessProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePipelineRobustnessProperties const *>( &rhs );
      return *this;
    }

    // Zero-cost conversions to the native Vulkan type, by reference and by pointer.
    operator VkPhysicalDevicePipelineRobustnessProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePipelineRobustnessProperties *>( this );
    }

    operator VkPhysicalDevicePipelineRobustnessProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePipelineRobustnessProperties *>( this );
    }

    operator VkPhysicalDevicePipelineRobustnessProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePipelineRobustnessProperties *>( this );
    }

    operator VkPhysicalDevicePipelineRobustnessProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePipelineRobustnessProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used by the reflection-based operator== below.
    std::tuple<StructureType const &,
               void * const &,
               PipelineRobustnessBufferBehavior const &,
               PipelineRobustnessBufferBehavior const &,
               PipelineRobustnessBufferBehavior const &,
               PipelineRobustnessImageBehavior const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, defaultRobustnessStorageBuffers, defaultRobustnessUniformBuffers, defaultRobustnessVertexInputs, defaultRobustnessImages );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePipelineRobustnessProperties const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer, not deep-compared.
    bool operator==( PhysicalDevicePipelineRobustnessProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( defaultRobustnessStorageBuffers == rhs.defaultRobustnessStorageBuffers ) &&
             ( defaultRobustnessUniformBuffers == rhs.defaultRobustnessUniformBuffers ) &&
             ( defaultRobustnessVertexInputs == rhs.defaultRobustnessVertexInputs ) && ( defaultRobustnessImages == rhs.defaultRobustnessImages );
#  endif
    }

    bool operator!=( PhysicalDevicePipelineRobustnessProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPhysicalDevicePipelineRobustnessProperties exactly.
    StructureType                    sType                           = StructureType::ePhysicalDevicePipelineRobustnessProperties;
    void *                           pNext                           = {};
    PipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers = PipelineRobustnessBufferBehavior::eDeviceDefault;
    PipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers = PipelineRobustnessBufferBehavior::eDeviceDefault;
    PipelineRobustnessBufferBehavior defaultRobustnessVertexInputs   = PipelineRobustnessBufferBehavior::eDeviceDefault;
    PipelineRobustnessImageBehavior  defaultRobustnessImages         = PipelineRobustnessImageBehavior::eDeviceDefault;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkPhysicalDevicePipelineRobustnessProperties>
  {
    using Type = PhysicalDevicePipelineRobustnessProperties;
  };
#endif

  // Associates the StructureType enumerant with the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineRobustnessProperties>
  {
    using Type = PhysicalDevicePipelineRobustnessProperties;
  };

  // Alias retained for code written against the EXT-suffixed extension name.
  using PhysicalDevicePipelineRobustnessPropertiesEXT = PhysicalDevicePipelineRobustnessProperties;

  // wrapper struct for struct VkPhysicalDevicePointClippingProperties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePointClippingProperties.html
  //
  // Properties struct: no setters are generated (unlike the Features wrappers
  // in this file), only constructors, conversions, reflection and comparison.
  struct PhysicalDevicePointClippingProperties
  {
    using NativeType = VkPhysicalDevicePointClippingProperties;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePointClippingProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; the behavior defaults to eAllClipPlanes, matching the member initializer.
    VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( PointClippingBehavior pointClippingBehavior_ = PointClippingBehavior::eAllClipPlanes,
                                                                void *                pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pointClippingBehavior{ pointClippingBehavior_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct via a layout-compatible cast.
    PhysicalDevicePointClippingProperties( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePointClippingProperties( *reinterpret_cast<PhysicalDevicePointClippingProperties const *>( &rhs ) )
    {
    }

    PhysicalDevicePointClippingProperties & operator=( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; valid because the wrapper mirrors its layout.
    PhysicalDevicePointClippingProperties & operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePointClippingProperties const *>( &rhs );
      return *this;
    }

    // Zero-cost conversions to the native Vulkan type, by reference and by pointer.
    operator VkPhysicalDevicePointClippingProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePointClippingProperties *>( this );
    }

    operator VkPhysicalDevicePointClippingProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePointClippingProperties *>( this );
    }

    operator VkPhysicalDevicePointClippingProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePointClippingProperties *>( this );
    }

    operator VkPhysicalDevicePointClippingProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePointClippingProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used by the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, PointClippingBehavior const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pointClippingBehavior );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePointClippingProperties const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer, not deep-compared.
    bool operator==( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pointClippingBehavior == rhs.pointClippingBehavior );
#  endif
    }

    bool operator!=( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPhysicalDevicePointClippingProperties exactly.
    StructureType         sType                 = StructureType::ePhysicalDevicePointClippingProperties;
    void *                pNext                 = {};
    PointClippingBehavior pointClippingBehavior = PointClippingBehavior::eAllClipPlanes;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkPhysicalDevicePointClippingProperties>
  {
    using Type = PhysicalDevicePointClippingProperties;
  };
#endif

  // Associates the StructureType enumerant with the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePointClippingProperties>
  {
    using Type = PhysicalDevicePointClippingProperties;
  };

  // Alias retained for code written against the KHR-suffixed extension name.
  using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties;

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  // wrapper struct for struct VkPhysicalDevicePortabilitySubsetFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePortabilitySubsetFeaturesKHR.html
  //
  // Beta extension: the whole wrapper (and its CppType specializations below)
  // is only compiled when VK_ENABLE_BETA_EXTENSIONS is defined.
  struct PhysicalDevicePortabilitySubsetFeaturesKHR
  {
    using NativeType = VkPhysicalDevicePortabilitySubsetFeaturesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every feature flag defaults to zero (VK_FALSE), sType is fixed below.
    VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR( Bool32 constantAlphaColorBlendFactors_         = {},
                                                                     Bool32 events_                                 = {},
                                                                     Bool32 imageViewFormatReinterpretation_        = {},
                                                                     Bool32 imageViewFormatSwizzle_                 = {},
                                                                     Bool32 imageView2DOn3DImage_                   = {},
                                                                     Bool32 multisampleArrayImage_                  = {},
                                                                     Bool32 mutableComparisonSamplers_              = {},
                                                                     Bool32 pointPolygons_                          = {},
                                                                     Bool32 samplerMipLodBias_                      = {},
                                                                     Bool32 separateStencilMaskRef_                 = {},
                                                                     Bool32 shaderSampleRateInterpolationFunctions_ = {},
                                                                     Bool32 tessellationIsolines_                   = {},
                                                                     Bool32 tessellationPointMode_                  = {},
                                                                     Bool32 triangleFans_                           = {},
                                                                     Bool32 vertexAttributeAccessBeyondStride_      = {},
                                                                     void * pNext_                                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , constantAlphaColorBlendFactors{ constantAlphaColorBlendFactors_ }
      , events{ events_ }
      , imageViewFormatReinterpretation{ imageViewFormatReinterpretation_ }
      , imageViewFormatSwizzle{ imageViewFormatSwizzle_ }
      , imageView2DOn3DImage{ imageView2DOn3DImage_ }
      , multisampleArrayImage{ multisampleArrayImage_ }
      , mutableComparisonSamplers{ mutableComparisonSamplers_ }
      , pointPolygons{ pointPolygons_ }
      , samplerMipLodBias{ samplerMipLodBias_ }
      , separateStencilMaskRef{ separateStencilMaskRef_ }
      , shaderSampleRateInterpolationFunctions{ shaderSampleRateInterpolationFunctions_ }
      , tessellationIsolines{ tessellationIsolines_ }
      , tessellationPointMode{ tessellationPointMode_ }
      , triangleFans{ triangleFans_ }
      , vertexAttributeAccessBeyondStride{ vertexAttributeAccessBeyondStride_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct via a layout-compatible cast.
    PhysicalDevicePortabilitySubsetFeaturesKHR( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePortabilitySubsetFeaturesKHR( *reinterpret_cast<PhysicalDevicePortabilitySubsetFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; valid because the wrapper mirrors its layout.
    PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePortabilitySubsetFeaturesKHR const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters returning *this, one per member.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
      setConstantAlphaColorBlendFactors( Bool32 constantAlphaColorBlendFactors_ ) VULKAN_HPP_NOEXCEPT
    {
      constantAlphaColorBlendFactors = constantAlphaColorBlendFactors_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setEvents( Bool32 events_ ) VULKAN_HPP_NOEXCEPT
    {
      events = events_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
      setImageViewFormatReinterpretation( Bool32 imageViewFormatReinterpretation_ ) VULKAN_HPP_NOEXCEPT
    {
      imageViewFormatReinterpretation = imageViewFormatReinterpretation_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setImageViewFormatSwizzle( Bool32 imageViewFormatSwizzle_ ) VULKAN_HPP_NOEXCEPT
    {
      imageViewFormatSwizzle = imageViewFormatSwizzle_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setImageView2DOn3DImage( Bool32 imageView2DOn3DImage_ ) VULKAN_HPP_NOEXCEPT
    {
      imageView2DOn3DImage = imageView2DOn3DImage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setMultisampleArrayImage( Bool32 multisampleArrayImage_ ) VULKAN_HPP_NOEXCEPT
    {
      multisampleArrayImage = multisampleArrayImage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setMutableComparisonSamplers( Bool32 mutableComparisonSamplers_ ) VULKAN_HPP_NOEXCEPT
    {
      mutableComparisonSamplers = mutableComparisonSamplers_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setPointPolygons( Bool32 pointPolygons_ ) VULKAN_HPP_NOEXCEPT
    {
      pointPolygons = pointPolygons_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setSamplerMipLodBias( Bool32 samplerMipLodBias_ ) VULKAN_HPP_NOEXCEPT
    {
      samplerMipLodBias = samplerMipLodBias_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setSeparateStencilMaskRef( Bool32 separateStencilMaskRef_ ) VULKAN_HPP_NOEXCEPT
    {
      separateStencilMaskRef = separateStencilMaskRef_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
      setShaderSampleRateInterpolationFunctions( Bool32 shaderSampleRateInterpolationFunctions_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSampleRateInterpolationFunctions = shaderSampleRateInterpolationFunctions_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setTessellationIsolines( Bool32 tessellationIsolines_ ) VULKAN_HPP_NOEXCEPT
    {
      tessellationIsolines = tessellationIsolines_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setTessellationPointMode( Bool32 tessellationPointMode_ ) VULKAN_HPP_NOEXCEPT
    {
      tessellationPointMode = tessellationPointMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setTriangleFans( Bool32 triangleFans_ ) VULKAN_HPP_NOEXCEPT
    {
      triangleFans = triangleFans_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
      setVertexAttributeAccessBeyondStride( Bool32 vertexAttributeAccessBeyondStride_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexAttributeAccessBeyondStride = vertexAttributeAccessBeyondStride_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native Vulkan type, by reference and by pointer.
    operator VkPhysicalDevicePortabilitySubsetFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePortabilitySubsetFeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePortabilitySubsetFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePortabilitySubsetFeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePortabilitySubsetFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePortabilitySubsetFeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePortabilitySubsetFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePortabilitySubsetFeaturesKHR *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members (in declaration order), used by the reflection-based operator== below.
    std::tuple<StructureType const &,
               void * const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       constantAlphaColorBlendFactors,
                       events,
                       imageViewFormatReinterpretation,
                       imageViewFormatSwizzle,
                       imageView2DOn3DImage,
                       multisampleArrayImage,
                       mutableComparisonSamplers,
                       pointPolygons,
                       samplerMipLodBias,
                       separateStencilMaskRef,
                       shaderSampleRateInterpolationFunctions,
                       tessellationIsolines,
                       tessellationPointMode,
                       triangleFans,
                       vertexAttributeAccessBeyondStride );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePortabilitySubsetFeaturesKHR const & ) const = default;
#  else
    // Memberwise equality; pNext is compared as a raw pointer, not deep-compared.
    bool operator==( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( constantAlphaColorBlendFactors == rhs.constantAlphaColorBlendFactors ) &&
             ( events == rhs.events ) && ( imageViewFormatReinterpretation == rhs.imageViewFormatReinterpretation ) &&
             ( imageViewFormatSwizzle == rhs.imageViewFormatSwizzle ) && ( imageView2DOn3DImage == rhs.imageView2DOn3DImage ) &&
             ( multisampleArrayImage == rhs.multisampleArrayImage ) && ( mutableComparisonSamplers == rhs.mutableComparisonSamplers ) &&
             ( pointPolygons == rhs.pointPolygons ) && ( samplerMipLodBias == rhs.samplerMipLodBias ) &&
             ( separateStencilMaskRef == rhs.separateStencilMaskRef ) &&
             ( shaderSampleRateInterpolationFunctions == rhs.shaderSampleRateInterpolationFunctions ) && ( tessellationIsolines == rhs.tessellationIsolines ) &&
             ( tessellationPointMode == rhs.tessellationPointMode ) && ( triangleFans == rhs.triangleFans ) &&
             ( vertexAttributeAccessBeyondStride == rhs.vertexAttributeAccessBeyondStride );
#    endif
    }

    bool operator!=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Member order and types mirror VkPhysicalDevicePortabilitySubsetFeaturesKHR exactly.
    StructureType sType                                  = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR;
    void *        pNext                                  = {};
    Bool32        constantAlphaColorBlendFactors         = {};
    Bool32        events                                 = {};
    Bool32        imageViewFormatReinterpretation        = {};
    Bool32        imageViewFormatSwizzle                 = {};
    Bool32        imageView2DOn3DImage                   = {};
    Bool32        multisampleArrayImage                  = {};
    Bool32        mutableComparisonSamplers              = {};
    Bool32        pointPolygons                          = {};
    Bool32        samplerMipLodBias                      = {};
    Bool32        separateStencilMaskRef                 = {};
    Bool32        shaderSampleRateInterpolationFunctions = {};
    Bool32        tessellationIsolines                   = {};
    Bool32        tessellationPointMode                  = {};
    Bool32        triangleFans                           = {};
    Bool32        vertexAttributeAccessBeyondStride      = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkPhysicalDevicePortabilitySubsetFeaturesKHR>
  {
    using Type = PhysicalDevicePortabilitySubsetFeaturesKHR;
  };
#  endif

  // Associates the StructureType enumerant with the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR>
  {
    using Type = PhysicalDevicePortabilitySubsetFeaturesKHR;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  // wrapper struct for struct VkPhysicalDevicePortabilitySubsetPropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePortabilitySubsetPropertiesKHR.html
  struct PhysicalDevicePortabilitySubsetPropertiesKHR
  {
    using NativeType = VkPhysicalDevicePortabilitySubsetPropertiesKHR;

    // Structure-chain metadata: duplicates of this struct are not allowed in a pNext
    // chain, and structureType is the sType value this struct must carry.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( uint32_t minVertexInputBindingStrideAlignment_ = {},
                                                                       void *   pNext_                                = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , minVertexInputBindingStrideAlignment{ minVertexInputBindingStrideAlignment_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible
    // with VkPhysicalDevicePortabilitySubsetPropertiesKHR.
    PhysicalDevicePortabilitySubsetPropertiesKHR( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePortabilitySubsetPropertiesKHR( *reinterpret_cast<PhysicalDevicePortabilitySubsetPropertiesKHR const *>( &rhs ) )
    {
    }

    PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption).
    PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePortabilitySubsetPropertiesKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePortabilitySubsetPropertiesKHR *>( this );
    }

    operator VkPhysicalDevicePortabilitySubsetPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePortabilitySubsetPropertiesKHR *>( this );
    }

    operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePortabilitySubsetPropertiesKHR *>( this );
    }

    operator VkPhysicalDevicePortabilitySubsetPropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePortabilitySubsetPropertiesKHR *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of member references; enables reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, minVertexInputBindingStrideAlignment );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; memberwise == / != otherwise.
    auto operator<=>( PhysicalDevicePortabilitySubsetPropertiesKHR const & ) const = default;
#  else
    bool operator==( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minVertexInputBindingStrideAlignment == rhs.minVertexInputBindingStrideAlignment );
#    endif
    }

    bool operator!=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    StructureType sType                                = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR;
    void *        pNext                                = {};
    uint32_t      minVertexInputBindingStrideAlignment = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (only provided for C++20 and later).
  template <>
  struct CppType<VkPhysicalDevicePortabilitySubsetPropertiesKHR>
  {
    using Type = PhysicalDevicePortabilitySubsetPropertiesKHR;
  };
#  endif

  // Maps the StructureType enum value to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR>
  {
    using Type = PhysicalDevicePortabilitySubsetPropertiesKHR;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  // wrapper struct for struct VkPhysicalDevicePresentBarrierFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentBarrierFeaturesNV.html
  struct PhysicalDevicePresentBarrierFeaturesNV
  {
    using NativeType = VkPhysicalDevicePresentBarrierFeaturesNV;

    // Structure-chain metadata: duplicates of this struct are not allowed in a pNext
    // chain, and structureType is the sType value this struct must carry.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePresentBarrierFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentBarrierFeaturesNV( Bool32 presentBarrier_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , presentBarrier{ presentBarrier_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentBarrierFeaturesNV( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible
    // with VkPhysicalDevicePresentBarrierFeaturesNV.
    PhysicalDevicePresentBarrierFeaturesNV( VkPhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePresentBarrierFeaturesNV( *reinterpret_cast<PhysicalDevicePresentBarrierFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDevicePresentBarrierFeaturesNV & operator=( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption).
    PhysicalDevicePresentBarrierFeaturesNV & operator=( VkPhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePresentBarrierFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentBarrierFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentBarrierFeaturesNV & setPresentBarrier( Bool32 presentBarrier_ ) VULKAN_HPP_NOEXCEPT
    {
      presentBarrier = presentBarrier_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkPhysicalDevicePresentBarrierFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePresentBarrierFeaturesNV *>( this );
    }

    operator VkPhysicalDevicePresentBarrierFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePresentBarrierFeaturesNV *>( this );
    }

    operator VkPhysicalDevicePresentBarrierFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePresentBarrierFeaturesNV *>( this );
    }

    operator VkPhysicalDevicePresentBarrierFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePresentBarrierFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of member references; enables reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, presentBarrier );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; memberwise == / != otherwise.
    auto operator<=>( PhysicalDevicePresentBarrierFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentBarrier == rhs.presentBarrier );
#  endif
    }

    bool operator!=( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType          = StructureType::ePhysicalDevicePresentBarrierFeaturesNV;
    void *        pNext          = {};
    Bool32        presentBarrier = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (only provided for C++20 and later).
  template <>
  struct CppType<VkPhysicalDevicePresentBarrierFeaturesNV>
  {
    using Type = PhysicalDevicePresentBarrierFeaturesNV;
  };
#endif

  // Maps the StructureType enum value to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePresentBarrierFeaturesNV>
  {
    using Type = PhysicalDevicePresentBarrierFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDevicePresentId2FeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentId2FeaturesKHR.html
  struct PhysicalDevicePresentId2FeaturesKHR
  {
    using NativeType = VkPhysicalDevicePresentId2FeaturesKHR;

    // Structure-chain metadata: duplicates of this struct are not allowed in a pNext
    // chain, and structureType is the sType value this struct must carry.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePresentId2FeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentId2FeaturesKHR( Bool32 presentId2_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , presentId2{ presentId2_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentId2FeaturesKHR( PhysicalDevicePresentId2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible
    // with VkPhysicalDevicePresentId2FeaturesKHR.
    PhysicalDevicePresentId2FeaturesKHR( VkPhysicalDevicePresentId2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePresentId2FeaturesKHR( *reinterpret_cast<PhysicalDevicePresentId2FeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDevicePresentId2FeaturesKHR & operator=( PhysicalDevicePresentId2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption).
    PhysicalDevicePresentId2FeaturesKHR & operator=( VkPhysicalDevicePresentId2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePresentId2FeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentId2FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentId2FeaturesKHR & setPresentId2( Bool32 presentId2_ ) VULKAN_HPP_NOEXCEPT
    {
      presentId2 = presentId2_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkPhysicalDevicePresentId2FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePresentId2FeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePresentId2FeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePresentId2FeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePresentId2FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePresentId2FeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePresentId2FeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePresentId2FeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of member references; enables reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, presentId2 );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; memberwise == / != otherwise.
    auto operator<=>( PhysicalDevicePresentId2FeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDevicePresentId2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentId2 == rhs.presentId2 );
#  endif
    }

    bool operator!=( PhysicalDevicePresentId2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType      = StructureType::ePhysicalDevicePresentId2FeaturesKHR;
    void *        pNext      = {};
    Bool32        presentId2 = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (only provided for C++20 and later).
  template <>
  struct CppType<VkPhysicalDevicePresentId2FeaturesKHR>
  {
    using Type = PhysicalDevicePresentId2FeaturesKHR;
  };
#endif

  // Maps the StructureType enum value to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePresentId2FeaturesKHR>
  {
    using Type = PhysicalDevicePresentId2FeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDevicePresentIdFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentIdFeaturesKHR.html
  struct PhysicalDevicePresentIdFeaturesKHR
  {
    using NativeType = VkPhysicalDevicePresentIdFeaturesKHR;

    // Structure-chain metadata: duplicates of this struct are not allowed in a pNext
    // chain, and structureType is the sType value this struct must carry.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePresentIdFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR( Bool32 presentId_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , presentId{ presentId_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR( PhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible
    // with VkPhysicalDevicePresentIdFeaturesKHR.
    PhysicalDevicePresentIdFeaturesKHR( VkPhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePresentIdFeaturesKHR( *reinterpret_cast<PhysicalDevicePresentIdFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDevicePresentIdFeaturesKHR & operator=( PhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption).
    PhysicalDevicePresentIdFeaturesKHR & operator=( VkPhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePresentIdFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR & setPresentId( Bool32 presentId_ ) VULKAN_HPP_NOEXCEPT
    {
      presentId = presentId_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkPhysicalDevicePresentIdFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePresentIdFeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePresentIdFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePresentIdFeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePresentIdFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePresentIdFeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePresentIdFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePresentIdFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of member references; enables reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, presentId );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; memberwise == / != otherwise.
    auto operator<=>( PhysicalDevicePresentIdFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDevicePresentIdFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentId == rhs.presentId );
#  endif
    }

    bool operator!=( PhysicalDevicePresentIdFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType     = StructureType::ePhysicalDevicePresentIdFeaturesKHR;
    void *        pNext     = {};
    Bool32        presentId = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (only provided for C++20 and later).
  template <>
  struct CppType<VkPhysicalDevicePresentIdFeaturesKHR>
  {
    using Type = PhysicalDevicePresentIdFeaturesKHR;
  };
#endif

  // Maps the StructureType enum value to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePresentIdFeaturesKHR>
  {
    using Type = PhysicalDevicePresentIdFeaturesKHR;
  };

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  // wrapper struct for struct VkPhysicalDevicePresentMeteringFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentMeteringFeaturesNV.html
  struct PhysicalDevicePresentMeteringFeaturesNV
  {
    using NativeType = VkPhysicalDevicePresentMeteringFeaturesNV;

    // Structure-chain metadata: duplicates of this struct are not allowed in a pNext
    // chain, and structureType is the sType value this struct must carry.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePresentMeteringFeaturesNV;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentMeteringFeaturesNV( Bool32 presentMetering_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , presentMetering{ presentMetering_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentMeteringFeaturesNV( PhysicalDevicePresentMeteringFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible
    // with VkPhysicalDevicePresentMeteringFeaturesNV.
    PhysicalDevicePresentMeteringFeaturesNV( VkPhysicalDevicePresentMeteringFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePresentMeteringFeaturesNV( *reinterpret_cast<PhysicalDevicePresentMeteringFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDevicePresentMeteringFeaturesNV & operator=( PhysicalDevicePresentMeteringFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption).
    PhysicalDevicePresentMeteringFeaturesNV & operator=( VkPhysicalDevicePresentMeteringFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePresentMeteringFeaturesNV const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentMeteringFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentMeteringFeaturesNV & setPresentMetering( Bool32 presentMetering_ ) VULKAN_HPP_NOEXCEPT
    {
      presentMetering = presentMetering_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkPhysicalDevicePresentMeteringFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePresentMeteringFeaturesNV *>( this );
    }

    operator VkPhysicalDevicePresentMeteringFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePresentMeteringFeaturesNV *>( this );
    }

    operator VkPhysicalDevicePresentMeteringFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePresentMeteringFeaturesNV *>( this );
    }

    operator VkPhysicalDevicePresentMeteringFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePresentMeteringFeaturesNV *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of member references; enables reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, presentMetering );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; memberwise == / != otherwise.
    auto operator<=>( PhysicalDevicePresentMeteringFeaturesNV const & ) const = default;
#  else
    bool operator==( PhysicalDevicePresentMeteringFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentMetering == rhs.presentMetering );
#    endif
    }

    bool operator!=( PhysicalDevicePresentMeteringFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    StructureType sType           = StructureType::ePhysicalDevicePresentMeteringFeaturesNV;
    void *        pNext           = {};
    Bool32        presentMetering = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (only provided for C++20 and later).
  template <>
  struct CppType<VkPhysicalDevicePresentMeteringFeaturesNV>
  {
    using Type = PhysicalDevicePresentMeteringFeaturesNV;
  };
#  endif

  // Maps the StructureType enum value to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePresentMeteringFeaturesNV>
  {
    using Type = PhysicalDevicePresentMeteringFeaturesNV;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  // wrapper struct for struct VkPhysicalDevicePresentModeFifoLatestReadyFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentModeFifoLatestReadyFeaturesKHR.html
  struct PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR
  {
    using NativeType = VkPhysicalDevicePresentModeFifoLatestReadyFeaturesKHR;

    // Structure-chain metadata: duplicates of this struct are not allowed in a pNext
    // chain, and structureType is the sType value this struct must carry.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePresentModeFifoLatestReadyFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR( Bool32 presentModeFifoLatestReady_ = {},
                                                                              void * pNext_                      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , presentModeFifoLatestReady{ presentModeFifoLatestReady_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR( PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible
    // with VkPhysicalDevicePresentModeFifoLatestReadyFeaturesKHR.
    PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR( VkPhysicalDevicePresentModeFifoLatestReadyFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR( *reinterpret_cast<PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR &
      operator=( PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption).
    PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR & operator=( VkPhysicalDevicePresentModeFifoLatestReadyFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR &
      setPresentModeFifoLatestReady( Bool32 presentModeFifoLatestReady_ ) VULKAN_HPP_NOEXCEPT
    {
      presentModeFifoLatestReady = presentModeFifoLatestReady_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkPhysicalDevicePresentModeFifoLatestReadyFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePresentModeFifoLatestReadyFeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePresentModeFifoLatestReadyFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePresentModeFifoLatestReadyFeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePresentModeFifoLatestReadyFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePresentModeFifoLatestReadyFeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePresentModeFifoLatestReadyFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePresentModeFifoLatestReadyFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of member references; enables reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, presentModeFifoLatestReady );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; memberwise == / != otherwise.
    auto operator<=>( PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentModeFifoLatestReady == rhs.presentModeFifoLatestReady );
#  endif
    }

    bool operator!=( PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                      = StructureType::ePhysicalDevicePresentModeFifoLatestReadyFeaturesKHR;
    void *        pNext                      = {};
    Bool32        presentModeFifoLatestReady = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (only provided for C++20 and later).
  template <>
  struct CppType<VkPhysicalDevicePresentModeFifoLatestReadyFeaturesKHR>
  {
    using Type = PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR;
  };
#endif

  // Maps the StructureType enum value to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePresentModeFifoLatestReadyFeaturesKHR>
  {
    using Type = PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR;
  };

  // EXT-suffixed alias for the KHR struct (same type).
  using PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT = PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR;

  // wrapper struct for struct VkPhysicalDevicePresentTimingFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentTimingFeaturesEXT.html
  struct PhysicalDevicePresentTimingFeaturesEXT
  {
    using NativeType = VkPhysicalDevicePresentTimingFeaturesEXT;

    // Structure-chain metadata: duplicates of this struct are not allowed in a pNext
    // chain, and structureType is the sType value this struct must carry.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePresentTimingFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentTimingFeaturesEXT( Bool32 presentTiming_         = {},
                                                                 Bool32 presentAtAbsoluteTime_ = {},
                                                                 Bool32 presentAtRelativeTime_ = {},
                                                                 void * pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , presentTiming{ presentTiming_ }
      , presentAtAbsoluteTime{ presentAtAbsoluteTime_ }
      , presentAtRelativeTime{ presentAtRelativeTime_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentTimingFeaturesEXT( PhysicalDevicePresentTimingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible
    // with VkPhysicalDevicePresentTimingFeaturesEXT.
    PhysicalDevicePresentTimingFeaturesEXT( VkPhysicalDevicePresentTimingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePresentTimingFeaturesEXT( *reinterpret_cast<PhysicalDevicePresentTimingFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDevicePresentTimingFeaturesEXT & operator=( PhysicalDevicePresentTimingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption).
    PhysicalDevicePresentTimingFeaturesEXT & operator=( VkPhysicalDevicePresentTimingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePresentTimingFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentTimingFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentTimingFeaturesEXT & setPresentTiming( Bool32 presentTiming_ ) VULKAN_HPP_NOEXCEPT
    {
      presentTiming = presentTiming_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentTimingFeaturesEXT & setPresentAtAbsoluteTime( Bool32 presentAtAbsoluteTime_ ) VULKAN_HPP_NOEXCEPT
    {
      presentAtAbsoluteTime = presentAtAbsoluteTime_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentTimingFeaturesEXT & setPresentAtRelativeTime( Bool32 presentAtRelativeTime_ ) VULKAN_HPP_NOEXCEPT
    {
      presentAtRelativeTime = presentAtRelativeTime_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkPhysicalDevicePresentTimingFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePresentTimingFeaturesEXT *>( this );
    }

    operator VkPhysicalDevicePresentTimingFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePresentTimingFeaturesEXT *>( this );
    }

    operator VkPhysicalDevicePresentTimingFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePresentTimingFeaturesEXT *>( this );
    }

    operator VkPhysicalDevicePresentTimingFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePresentTimingFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of member references; enables reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, presentTiming, presentAtAbsoluteTime, presentAtRelativeTime );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; memberwise == / != otherwise.
    auto operator<=>( PhysicalDevicePresentTimingFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDevicePresentTimingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentTiming == rhs.presentTiming ) &&
             ( presentAtAbsoluteTime == rhs.presentAtAbsoluteTime ) && ( presentAtRelativeTime == rhs.presentAtRelativeTime );
#  endif
    }

    bool operator!=( PhysicalDevicePresentTimingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                 = StructureType::ePhysicalDevicePresentTimingFeaturesEXT;
    void *        pNext                 = {};
    Bool32        presentTiming         = {};
    Bool32        presentAtAbsoluteTime = {};
    Bool32        presentAtRelativeTime = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (only provided for C++20 and later).
  template <>
  struct CppType<VkPhysicalDevicePresentTimingFeaturesEXT>
  {
    using Type = PhysicalDevicePresentTimingFeaturesEXT;
  };
#endif

  // Maps the StructureType enum value to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePresentTimingFeaturesEXT>
  {
    using Type = PhysicalDevicePresentTimingFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDevicePresentWait2FeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentWait2FeaturesKHR.html
  struct PhysicalDevicePresentWait2FeaturesKHR
  {
    // The C API struct this wrapper mirrors; the casts below rely on identical memory layout.
    using NativeType = VkPhysicalDevicePresentWait2FeaturesKHR;

    // Compile-time metadata: the matching StructureType enumerant, and whether this struct may appear
    // more than once in a pNext chain (presumably consumed by generic chain helpers — not visible here).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePresentWait2FeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer and is never passed in.
    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWait2FeaturesKHR( Bool32 presentWait2_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , presentWait2{ presentWait2_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWait2FeaturesKHR( PhysicalDevicePresentWait2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the (layout-identical) wrapper.
    PhysicalDevicePresentWait2FeaturesKHR( VkPhysicalDevicePresentWait2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePresentWait2FeaturesKHR( *reinterpret_cast<PhysicalDevicePresentWait2FeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDevicePresentWait2FeaturesKHR & operator=( PhysicalDevicePresentWait2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the same layout-identical reinterpretation.
    PhysicalDevicePresentWait2FeaturesKHR & operator=( VkPhysicalDevicePresentWait2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePresentWait2FeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWait2FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWait2FeaturesKHR & setPresentWait2( Bool32 presentWait2_ ) VULKAN_HPP_NOEXCEPT
    {
      presentWait2 = presentWait2_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer, for passing directly to the C API.
    operator VkPhysicalDevicePresentWait2FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePresentWait2FeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePresentWait2FeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePresentWait2FeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePresentWait2FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePresentWait2FeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePresentWait2FeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePresentWait2FeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to every member; used by the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, presentWait2 );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePresentWait2FeaturesKHR const & ) const = default;
#else
    // Member-wise equality fallback for compilers without defaulted operator<=>.
    bool operator==( PhysicalDevicePresentWait2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentWait2 == rhs.presentWait2 );
#  endif
    }

    bool operator!=( PhysicalDevicePresentWait2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType identifies this struct to the API; pNext optionally chains further structures (Vulkan convention).
    StructureType sType        = StructureType::ePhysicalDevicePresentWait2FeaturesKHR;
    void *        pNext        = {};
    Bool32        presentWait2 = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (for generic, type-driven code).
  template <>
  struct CppType<VkPhysicalDevicePresentWait2FeaturesKHR>
  {
    using Type = PhysicalDevicePresentWait2FeaturesKHR;
  };
#endif

  // Maps the StructureType enumerant back to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePresentWait2FeaturesKHR>
  {
    using Type = PhysicalDevicePresentWait2FeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDevicePresentWaitFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentWaitFeaturesKHR.html
  struct PhysicalDevicePresentWaitFeaturesKHR
  {
    // The C API struct this wrapper mirrors; the casts below rely on identical memory layout.
    using NativeType = VkPhysicalDevicePresentWaitFeaturesKHR;

    // Compile-time metadata: the matching StructureType enumerant, and whether this struct may appear
    // more than once in a pNext chain (presumably consumed by generic chain helpers — not visible here).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePresentWaitFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer and is never passed in.
    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWaitFeaturesKHR( Bool32 presentWait_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , presentWait{ presentWait_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWaitFeaturesKHR( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the (layout-identical) wrapper.
    PhysicalDevicePresentWaitFeaturesKHR( VkPhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePresentWaitFeaturesKHR( *reinterpret_cast<PhysicalDevicePresentWaitFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDevicePresentWaitFeaturesKHR & operator=( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the same layout-identical reinterpretation.
    PhysicalDevicePresentWaitFeaturesKHR & operator=( VkPhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePresentWaitFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWaitFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWaitFeaturesKHR & setPresentWait( Bool32 presentWait_ ) VULKAN_HPP_NOEXCEPT
    {
      presentWait = presentWait_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer, for passing directly to the C API.
    operator VkPhysicalDevicePresentWaitFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePresentWaitFeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePresentWaitFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePresentWaitFeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePresentWaitFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePresentWaitFeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePresentWaitFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePresentWaitFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to every member; used by the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, presentWait );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePresentWaitFeaturesKHR const & ) const = default;
#else
    // Member-wise equality fallback for compilers without defaulted operator<=>.
    bool operator==( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentWait == rhs.presentWait );
#  endif
    }

    bool operator!=( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType identifies this struct to the API; pNext optionally chains further structures (Vulkan convention).
    StructureType sType       = StructureType::ePhysicalDevicePresentWaitFeaturesKHR;
    void *        pNext       = {};
    Bool32        presentWait = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (for generic, type-driven code).
  template <>
  struct CppType<VkPhysicalDevicePresentWaitFeaturesKHR>
  {
    using Type = PhysicalDevicePresentWaitFeaturesKHR;
  };
#endif

  // Maps the StructureType enumerant back to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePresentWaitFeaturesKHR>
  {
    using Type = PhysicalDevicePresentWaitFeaturesKHR;
  };

#if defined( VK_USE_PLATFORM_OHOS )
  // wrapper struct for struct VkPhysicalDevicePresentationPropertiesOHOS, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentationPropertiesOHOS.html
  struct PhysicalDevicePresentationPropertiesOHOS
  {
    // The C API struct this wrapper mirrors; the casts below rely on identical memory layout.
    using NativeType = VkPhysicalDevicePresentationPropertiesOHOS;

    // Compile-time metadata: the matching StructureType enumerant, and whether this struct may appear
    // more than once in a pNext chain (presumably consumed by generic chain helpers — not visible here).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePresentationPropertiesOHOS;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer and is never passed in.
    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentationPropertiesOHOS( Bool32 sharedImage_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , sharedImage{ sharedImage_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentationPropertiesOHOS( PhysicalDevicePresentationPropertiesOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the (layout-identical) wrapper.
    PhysicalDevicePresentationPropertiesOHOS( VkPhysicalDevicePresentationPropertiesOHOS const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePresentationPropertiesOHOS( *reinterpret_cast<PhysicalDevicePresentationPropertiesOHOS const *>( &rhs ) )
    {
    }

    PhysicalDevicePresentationPropertiesOHOS & operator=( PhysicalDevicePresentationPropertiesOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the same layout-identical reinterpretation.
    // Note: no setters are generated — this is a read-only properties struct filled in by the implementation.
    PhysicalDevicePresentationPropertiesOHOS & operator=( VkPhysicalDevicePresentationPropertiesOHOS const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePresentationPropertiesOHOS const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C struct, by reference and by pointer, for passing directly to the C API.
    operator VkPhysicalDevicePresentationPropertiesOHOS const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePresentationPropertiesOHOS *>( this );
    }

    operator VkPhysicalDevicePresentationPropertiesOHOS &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePresentationPropertiesOHOS *>( this );
    }

    operator VkPhysicalDevicePresentationPropertiesOHOS const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePresentationPropertiesOHOS *>( this );
    }

    operator VkPhysicalDevicePresentationPropertiesOHOS *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePresentationPropertiesOHOS *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to every member; used by the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, sharedImage );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePresentationPropertiesOHOS const & ) const = default;
#  else
    // Member-wise equality fallback for compilers without defaulted operator<=>.
    bool operator==( PhysicalDevicePresentationPropertiesOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sharedImage == rhs.sharedImage );
#    endif
    }

    bool operator!=( PhysicalDevicePresentationPropertiesOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // sType identifies this struct to the API; pNext optionally chains further structures (Vulkan convention).
    StructureType sType       = StructureType::ePhysicalDevicePresentationPropertiesOHOS;
    void *        pNext       = {};
    Bool32        sharedImage = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (for generic, type-driven code).
  template <>
  struct CppType<VkPhysicalDevicePresentationPropertiesOHOS>
  {
    using Type = PhysicalDevicePresentationPropertiesOHOS;
  };
#  endif

  // Maps the StructureType enumerant back to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePresentationPropertiesOHOS>
  {
    using Type = PhysicalDevicePresentationPropertiesOHOS;
  };
#endif /*VK_USE_PLATFORM_OHOS*/

  // wrapper struct for struct VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT.html
  struct PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT
  {
    // The C API struct this wrapper mirrors; the casts below rely on identical memory layout.
    using NativeType = VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;

    // Compile-time metadata: the matching StructureType enumerant, and whether this struct may appear
    // more than once in a pNext chain (presumably consumed by generic chain helpers — not visible here).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer and is never passed in.
    VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( Bool32 primitiveTopologyListRestart_      = {},
                                                                                Bool32 primitiveTopologyPatchListRestart_ = {},
                                                                                void * pNext_                             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , primitiveTopologyListRestart{ primitiveTopologyListRestart_ }
      , primitiveTopologyPatchListRestart{ primitiveTopologyPatchListRestart_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the (layout-identical) wrapper.
    PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( *reinterpret_cast<PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &
      operator=( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the same layout-identical reinterpretation.
    PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & operator=( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &
      setPrimitiveTopologyListRestart( Bool32 primitiveTopologyListRestart_ ) VULKAN_HPP_NOEXCEPT
    {
      primitiveTopologyListRestart = primitiveTopologyListRestart_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &
      setPrimitiveTopologyPatchListRestart( Bool32 primitiveTopologyPatchListRestart_ ) VULKAN_HPP_NOEXCEPT
    {
      primitiveTopologyPatchListRestart = primitiveTopologyPatchListRestart_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer, for passing directly to the C API.
    operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT *>( this );
    }

    operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT *>( this );
    }

    operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT *>( this );
    }

    operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to every member; used by the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, primitiveTopologyListRestart, primitiveTopologyPatchListRestart );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & ) const = default;
#else
    // Member-wise equality fallback for compilers without defaulted operator<=>.
    bool operator==( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( primitiveTopologyListRestart == rhs.primitiveTopologyListRestart ) &&
             ( primitiveTopologyPatchListRestart == rhs.primitiveTopologyPatchListRestart );
#  endif
    }

    bool operator!=( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType identifies this struct to the API; pNext optionally chains further structures (Vulkan convention).
    StructureType sType                             = StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
    void *        pNext                             = {};
    Bool32        primitiveTopologyListRestart      = {};
    Bool32        primitiveTopologyPatchListRestart = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (for generic, type-driven code).
  template <>
  struct CppType<VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT>
  {
    using Type = PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant back to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT>
  {
    using Type = PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT.html
  struct PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT
  {
    // The C API struct this wrapper mirrors; the casts below rely on identical memory layout.
    using NativeType = VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;

    // Compile-time metadata: the matching StructureType enumerant, and whether this struct may appear
    // more than once in a pNext chain (presumably consumed by generic chain helpers — not visible here).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer and is never passed in.
    VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( Bool32 primitivesGeneratedQuery_                      = {},
                                                                            Bool32 primitivesGeneratedQueryWithRasterizerDiscard_ = {},
                                                                            Bool32 primitivesGeneratedQueryWithNonZeroStreams_    = {},
                                                                            void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , primitivesGeneratedQuery{ primitivesGeneratedQuery_ }
      , primitivesGeneratedQueryWithRasterizerDiscard{ primitivesGeneratedQueryWithRasterizerDiscard_ }
      , primitivesGeneratedQueryWithNonZeroStreams{ primitivesGeneratedQueryWithNonZeroStreams_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the (layout-identical) wrapper.
    PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( *reinterpret_cast<PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &
      operator=( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the same layout-identical reinterpretation.
    PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & operator=( VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &
      setPrimitivesGeneratedQuery( Bool32 primitivesGeneratedQuery_ ) VULKAN_HPP_NOEXCEPT
    {
      primitivesGeneratedQuery = primitivesGeneratedQuery_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &
      setPrimitivesGeneratedQueryWithRasterizerDiscard( Bool32 primitivesGeneratedQueryWithRasterizerDiscard_ ) VULKAN_HPP_NOEXCEPT
    {
      primitivesGeneratedQueryWithRasterizerDiscard = primitivesGeneratedQueryWithRasterizerDiscard_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &
      setPrimitivesGeneratedQueryWithNonZeroStreams( Bool32 primitivesGeneratedQueryWithNonZeroStreams_ ) VULKAN_HPP_NOEXCEPT
    {
      primitivesGeneratedQueryWithNonZeroStreams = primitivesGeneratedQueryWithNonZeroStreams_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer, for passing directly to the C API.
    operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT *>( this );
    }

    operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT *>( this );
    }

    operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT *>( this );
    }

    operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to every member; used by the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, primitivesGeneratedQuery, primitivesGeneratedQueryWithRasterizerDiscard, primitivesGeneratedQueryWithNonZeroStreams );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & ) const = default;
#else
    // Member-wise equality fallback for compilers without defaulted operator<=>.
    bool operator==( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( primitivesGeneratedQuery == rhs.primitivesGeneratedQuery ) &&
             ( primitivesGeneratedQueryWithRasterizerDiscard == rhs.primitivesGeneratedQueryWithRasterizerDiscard ) &&
             ( primitivesGeneratedQueryWithNonZeroStreams == rhs.primitivesGeneratedQueryWithNonZeroStreams );
#  endif
    }

    bool operator!=( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType identifies this struct to the API; pNext optionally chains further structures (Vulkan convention).
    StructureType sType                                         = StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
    void *        pNext                                         = {};
    Bool32        primitivesGeneratedQuery                      = {};
    Bool32        primitivesGeneratedQueryWithRasterizerDiscard = {};
    Bool32        primitivesGeneratedQueryWithNonZeroStreams    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (for generic, type-driven code).
  template <>
  struct CppType<VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT>
  {
    using Type = PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant back to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT>
  {
    using Type = PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDevicePrivateDataFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePrivateDataFeatures.html
  struct PhysicalDevicePrivateDataFeatures
  {
    // The C API struct this wrapper mirrors; the casts below rely on identical memory layout.
    using NativeType = VkPhysicalDevicePrivateDataFeatures;

    // Compile-time metadata: the matching StructureType enumerant, and whether this struct may appear
    // more than once in a pNext chain (presumably consumed by generic chain helpers — not visible here).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePrivateDataFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer and is never passed in.
    VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeatures( Bool32 privateData_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , privateData{ privateData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeatures( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the (layout-identical) wrapper.
    PhysicalDevicePrivateDataFeatures( VkPhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePrivateDataFeatures( *reinterpret_cast<PhysicalDevicePrivateDataFeatures const *>( &rhs ) )
    {
    }

    PhysicalDevicePrivateDataFeatures & operator=( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the same layout-identical reinterpretation.
    PhysicalDevicePrivateDataFeatures & operator=( VkPhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePrivateDataFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures & setPrivateData( Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT
    {
      privateData = privateData_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer, for passing directly to the C API.
    operator VkPhysicalDevicePrivateDataFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePrivateDataFeatures *>( this );
    }

    operator VkPhysicalDevicePrivateDataFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePrivateDataFeatures *>( this );
    }

    operator VkPhysicalDevicePrivateDataFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePrivateDataFeatures *>( this );
    }

    operator VkPhysicalDevicePrivateDataFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePrivateDataFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to every member; used by the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, privateData );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePrivateDataFeatures const & ) const = default;
#else
    // Member-wise equality fallback for compilers without defaulted operator<=>.
    bool operator==( PhysicalDevicePrivateDataFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( privateData == rhs.privateData );
#  endif
    }

    bool operator!=( PhysicalDevicePrivateDataFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType identifies this struct to the API; pNext optionally chains further structures (Vulkan convention).
    StructureType sType       = StructureType::ePhysicalDevicePrivateDataFeatures;
    void *        pNext       = {};
    Bool32        privateData = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (for generic, type-driven code).
  template <>
  struct CppType<VkPhysicalDevicePrivateDataFeatures>
  {
    using Type = PhysicalDevicePrivateDataFeatures;
  };
#endif

  // Maps the StructureType enumerant back to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePrivateDataFeatures>
  {
    using Type = PhysicalDevicePrivateDataFeatures;
  };

  // Backward-compatibility alias exposing the struct under its original EXT extension name.
  using PhysicalDevicePrivateDataFeaturesEXT = PhysicalDevicePrivateDataFeatures;

  // wrapper struct for struct VkPhysicalDeviceProtectedMemoryFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceProtectedMemoryFeatures.html
  struct PhysicalDeviceProtectedMemoryFeatures
  {
    // The C API struct this wrapper mirrors; the casts below rely on identical memory layout.
    using NativeType = VkPhysicalDeviceProtectedMemoryFeatures;

    // Compile-time metadata: the matching StructureType enumerant, and whether this struct may appear
    // more than once in a pNext chain (presumably consumed by generic chain helpers — not visible here).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceProtectedMemoryFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer and is never passed in.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( Bool32 protectedMemory_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , protectedMemory{ protectedMemory_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the (layout-identical) wrapper.
    PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceProtectedMemoryFeatures( *reinterpret_cast<PhysicalDeviceProtectedMemoryFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceProtectedMemoryFeatures & operator=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the same layout-identical reinterpretation.
    PhysicalDeviceProtectedMemoryFeatures & operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceProtectedMemoryFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures & setProtectedMemory( Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
    {
      protectedMemory = protectedMemory_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer, for passing directly to the C API.
    operator VkPhysicalDeviceProtectedMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryFeatures *>( this );
    }

    operator VkPhysicalDeviceProtectedMemoryFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures *>( this );
    }

    operator VkPhysicalDeviceProtectedMemoryFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceProtectedMemoryFeatures *>( this );
    }

    operator VkPhysicalDeviceProtectedMemoryFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to every member; used by the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, protectedMemory );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceProtectedMemoryFeatures const & ) const = default;
#else
    // Member-wise equality fallback for compilers without defaulted operator<=>.
    bool operator==( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedMemory == rhs.protectedMemory );
#  endif
    }

    bool operator!=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType identifies this struct to the API; pNext optionally chains further structures (Vulkan convention).
    StructureType sType           = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
    void *        pNext           = {};
    Bool32        protectedMemory = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (for generic, type-driven code).
  template <>
  struct CppType<VkPhysicalDeviceProtectedMemoryFeatures>
  {
    using Type = PhysicalDeviceProtectedMemoryFeatures;
  };
#endif

  // Maps the StructureType enumerant back to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryFeatures>
  {
    using Type = PhysicalDeviceProtectedMemoryFeatures;
  };

  // wrapper struct for struct VkPhysicalDeviceProtectedMemoryProperties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceProtectedMemoryProperties.html
  struct PhysicalDeviceProtectedMemoryProperties
  {
    using NativeType = VkPhysicalDeviceProtectedMemoryProperties;

    // This struct may appear at most once in a pNext structure chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceProtectedMemoryProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and never user-settable.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( Bool32 protectedNoFault_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , protectedNoFault{ protectedNoFault_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper mirrors the C layout member-for-member.
    PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceProtectedMemoryProperties( *reinterpret_cast<PhysicalDeviceProtectedMemoryProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceProtectedMemoryProperties & operator=( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpret).
    PhysicalDeviceProtectedMemoryProperties & operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceProtectedMemoryProperties const *>( &rhs );
      return *this;
    }

    // Implicit, zero-copy views onto the native struct. Note: no member setters are
    // generated for this struct; its values are filled in by the implementation when
    // queried (returned-only struct in the registry).
    operator VkPhysicalDeviceProtectedMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties *>( this );
    }

    operator VkPhysicalDeviceProtectedMemoryProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties *>( this );
    }

    operator VkPhysicalDeviceProtectedMemoryProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties *>( this );
    }

    operator VkPhysicalDeviceProtectedMemoryProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; used by operator== below when reflection is enabled.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, protectedNoFault );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceProtectedMemoryProperties const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer, not by chain contents.
    bool operator==( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedNoFault == rhs.protectedNoFault );
#  endif
    }

    bool operator!=( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order must match VkPhysicalDeviceProtectedMemoryProperties exactly (see conversion operators).
    StructureType sType            = StructureType::ePhysicalDeviceProtectedMemoryProperties;
    void *        pNext            = {};
    Bool32        protectedNoFault = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vk struct to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceProtectedMemoryProperties>
  {
    using Type = PhysicalDeviceProtectedMemoryProperties;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type (used by structure-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryProperties>
  {
    using Type = PhysicalDeviceProtectedMemoryProperties;
  };

  // wrapper struct for struct VkPhysicalDeviceProvokingVertexFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceProvokingVertexFeaturesEXT.html
  struct PhysicalDeviceProvokingVertexFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceProvokingVertexFeaturesEXT;

    // This struct may appear at most once in a pNext structure chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and never user-settable.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT( Bool32 provokingVertexLast_                       = {},
                                                                   Bool32 transformFeedbackPreservesProvokingVertex_ = {},
                                                                   void * pNext_                                     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , provokingVertexLast{ provokingVertexLast_ }
      , transformFeedbackPreservesProvokingVertex{ transformFeedbackPreservesProvokingVertex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper mirrors the C layout member-for-member.
    PhysicalDeviceProvokingVertexFeaturesEXT( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceProvokingVertexFeaturesEXT( *reinterpret_cast<PhysicalDeviceProvokingVertexFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceProvokingVertexFeaturesEXT & operator=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpret).
    PhysicalDeviceProvokingVertexFeaturesEXT & operator=( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceProvokingVertexFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns exactly one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & setProvokingVertexLast( Bool32 provokingVertexLast_ ) VULKAN_HPP_NOEXCEPT
    {
      provokingVertexLast = provokingVertexLast_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT &
      setTransformFeedbackPreservesProvokingVertex( Bool32 transformFeedbackPreservesProvokingVertex_ ) VULKAN_HPP_NOEXCEPT
    {
      transformFeedbackPreservesProvokingVertex = transformFeedbackPreservesProvokingVertex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit, zero-copy views onto the native struct.
    operator VkPhysicalDeviceProvokingVertexFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceProvokingVertexFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceProvokingVertexFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceProvokingVertexFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceProvokingVertexFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceProvokingVertexFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceProvokingVertexFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceProvokingVertexFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; used by operator== below when reflection is enabled.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, provokingVertexLast, transformFeedbackPreservesProvokingVertex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceProvokingVertexFeaturesEXT const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer, not by chain contents.
    bool operator==( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexLast == rhs.provokingVertexLast ) &&
             ( transformFeedbackPreservesProvokingVertex == rhs.transformFeedbackPreservesProvokingVertex );
#  endif
    }

    bool operator!=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order must match VkPhysicalDeviceProvokingVertexFeaturesEXT exactly (see conversion operators).
    StructureType sType                                     = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT;
    void *        pNext                                     = {};
    Bool32        provokingVertexLast                       = {};
    Bool32        transformFeedbackPreservesProvokingVertex = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vk struct to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceProvokingVertexFeaturesEXT>
  {
    using Type = PhysicalDeviceProvokingVertexFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type (used by structure-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT>
  {
    using Type = PhysicalDeviceProvokingVertexFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceProvokingVertexPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceProvokingVertexPropertiesEXT.html
  struct PhysicalDeviceProvokingVertexPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceProvokingVertexPropertiesEXT;

    // This struct may appear at most once in a pNext structure chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and never user-settable.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT( Bool32 provokingVertexModePerPipeline_                       = {},
                                                                     Bool32 transformFeedbackPreservesTriangleFanProvokingVertex_ = {},
                                                                     void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , provokingVertexModePerPipeline{ provokingVertexModePerPipeline_ }
      , transformFeedbackPreservesTriangleFanProvokingVertex{ transformFeedbackPreservesTriangleFanProvokingVertex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper mirrors the C layout member-for-member.
    PhysicalDeviceProvokingVertexPropertiesEXT( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceProvokingVertexPropertiesEXT( *reinterpret_cast<PhysicalDeviceProvokingVertexPropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceProvokingVertexPropertiesEXT & operator=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpret).
    PhysicalDeviceProvokingVertexPropertiesEXT & operator=( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceProvokingVertexPropertiesEXT const *>( &rhs );
      return *this;
    }

    // Implicit, zero-copy views onto the native struct. Note: no member setters are
    // generated for this struct; its values are filled in by the implementation when
    // queried (returned-only struct in the registry).
    operator VkPhysicalDeviceProvokingVertexPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceProvokingVertexPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceProvokingVertexPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceProvokingVertexPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceProvokingVertexPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceProvokingVertexPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceProvokingVertexPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceProvokingVertexPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; used by operator== below when reflection is enabled.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, provokingVertexModePerPipeline, transformFeedbackPreservesTriangleFanProvokingVertex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceProvokingVertexPropertiesEXT const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer, not by chain contents.
    bool operator==( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexModePerPipeline == rhs.provokingVertexModePerPipeline ) &&
             ( transformFeedbackPreservesTriangleFanProvokingVertex == rhs.transformFeedbackPreservesTriangleFanProvokingVertex );
#  endif
    }

    bool operator!=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order must match VkPhysicalDeviceProvokingVertexPropertiesEXT exactly (see conversion operators).
    StructureType sType                                                = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT;
    void *        pNext                                                = {};
    Bool32        provokingVertexModePerPipeline                       = {};
    Bool32        transformFeedbackPreservesTriangleFanProvokingVertex = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vk struct to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceProvokingVertexPropertiesEXT>
  {
    using Type = PhysicalDeviceProvokingVertexPropertiesEXT;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type (used by structure-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT>
  {
    using Type = PhysicalDeviceProvokingVertexPropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDevicePushDescriptorProperties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePushDescriptorProperties.html
  struct PhysicalDevicePushDescriptorProperties
  {
    using NativeType = VkPhysicalDevicePushDescriptorProperties;

    // This struct may appear at most once in a pNext structure chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDevicePushDescriptorProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and never user-settable.
    VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorProperties( uint32_t maxPushDescriptors_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxPushDescriptors{ maxPushDescriptors_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorProperties( PhysicalDevicePushDescriptorProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper mirrors the C layout member-for-member.
    PhysicalDevicePushDescriptorProperties( VkPhysicalDevicePushDescriptorProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePushDescriptorProperties( *reinterpret_cast<PhysicalDevicePushDescriptorProperties const *>( &rhs ) )
    {
    }

    PhysicalDevicePushDescriptorProperties & operator=( PhysicalDevicePushDescriptorProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpret).
    PhysicalDevicePushDescriptorProperties & operator=( VkPhysicalDevicePushDescriptorProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDevicePushDescriptorProperties const *>( &rhs );
      return *this;
    }

    // Implicit, zero-copy views onto the native struct. Note: no member setters are
    // generated for this struct; its values are filled in by the implementation when
    // queried (returned-only struct in the registry).
    operator VkPhysicalDevicePushDescriptorProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePushDescriptorProperties *>( this );
    }

    operator VkPhysicalDevicePushDescriptorProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePushDescriptorProperties *>( this );
    }

    operator VkPhysicalDevicePushDescriptorProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDevicePushDescriptorProperties *>( this );
    }

    operator VkPhysicalDevicePushDescriptorProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDevicePushDescriptorProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; used by operator== below when reflection is enabled.
    std::tuple<StructureType const &, void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxPushDescriptors );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePushDescriptorProperties const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer, not by chain contents.
    bool operator==( PhysicalDevicePushDescriptorProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPushDescriptors == rhs.maxPushDescriptors );
#  endif
    }

    bool operator!=( PhysicalDevicePushDescriptorProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order must match VkPhysicalDevicePushDescriptorProperties exactly (see conversion operators).
    StructureType sType              = StructureType::ePhysicalDevicePushDescriptorProperties;
    void *        pNext              = {};
    uint32_t      maxPushDescriptors = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vk struct to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDevicePushDescriptorProperties>
  {
    using Type = PhysicalDevicePushDescriptorProperties;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type (used by structure-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePushDescriptorProperties>
  {
    using Type = PhysicalDevicePushDescriptorProperties;
  };

  // Backward-compatible alias for code written against the pre-promotion KHR name.
  using PhysicalDevicePushDescriptorPropertiesKHR = PhysicalDevicePushDescriptorProperties;

  // wrapper struct for struct VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM.html
  struct PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM
  {
    using NativeType = VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM;

    // This struct may appear at most once in a pNext structure chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and never user-settable.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM(
      uint32_t                                       queueFamilyIndex_ = {},
      PhysicalDeviceDataGraphProcessingEngineTypeARM engineType_       = PhysicalDeviceDataGraphProcessingEngineTypeARM::eDefault,
      const void *                                   pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , queueFamilyIndex{ queueFamilyIndex_ }
      , engineType{ engineType_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM( PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper mirrors the C layout member-for-member.
    PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM( VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM(
          *reinterpret_cast<PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM const *>( &rhs ) )
    {
    }

    PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM &
      operator=( PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpret).
    PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM &
      operator=( VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns exactly one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndex = queueFamilyIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM &
      setEngineType( PhysicalDeviceDataGraphProcessingEngineTypeARM engineType_ ) VULKAN_HPP_NOEXCEPT
    {
      engineType = engineType_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit, zero-copy views onto the native struct.
    operator VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM *>( this );
    }

    operator VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM *>( this );
    }

    operator VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM *>( this );
    }

    operator VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; used by operator== below when reflection is enabled.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, PhysicalDeviceDataGraphProcessingEngineTypeARM const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, queueFamilyIndex, engineType );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer, not by chain contents.
    bool operator==( PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && ( engineType == rhs.engineType );
#  endif
    }

    bool operator!=( PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order must match VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM exactly (see conversion operators).
    StructureType                                  sType            = StructureType::ePhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM;
    const void *                                   pNext            = {};
    uint32_t                                       queueFamilyIndex = {};
    PhysicalDeviceDataGraphProcessingEngineTypeARM engineType       = PhysicalDeviceDataGraphProcessingEngineTypeARM::eDefault;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vk struct to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM>
  {
    using Type = PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type (used by structure-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM>
  {
    using Type = PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM;
  };

  // wrapper struct for struct VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT.html
  struct PhysicalDeviceRGBA10X6FormatsFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT;

    // This struct may appear at most once in a pNext structure chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and never user-settable.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT( Bool32 formatRgba10x6WithoutYCbCrSampler_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , formatRgba10x6WithoutYCbCrSampler{ formatRgba10x6WithoutYCbCrSampler_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper mirrors the C layout member-for-member.
    PhysicalDeviceRGBA10X6FormatsFeaturesEXT( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRGBA10X6FormatsFeaturesEXT( *reinterpret_cast<PhysicalDeviceRGBA10X6FormatsFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceRGBA10X6FormatsFeaturesEXT & operator=( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpret).
    PhysicalDeviceRGBA10X6FormatsFeaturesEXT & operator=( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceRGBA10X6FormatsFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns exactly one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT &
      setFormatRgba10x6WithoutYCbCrSampler( Bool32 formatRgba10x6WithoutYCbCrSampler_ ) VULKAN_HPP_NOEXCEPT
    {
      formatRgba10x6WithoutYCbCrSampler = formatRgba10x6WithoutYCbCrSampler_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit, zero-copy views onto the native struct.
    operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; used by operator== below when reflection is enabled.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, formatRgba10x6WithoutYCbCrSampler );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer, not by chain contents.
    bool operator==( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatRgba10x6WithoutYCbCrSampler == rhs.formatRgba10x6WithoutYCbCrSampler );
#  endif
    }

    bool operator!=( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order must match VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT exactly (see conversion operators).
    StructureType sType                             = StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT;
    void *        pNext                             = {};
    Bool32        formatRgba10x6WithoutYCbCrSampler = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vk struct to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT>
  {
    using Type = PhysicalDeviceRGBA10X6FormatsFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type (used by structure-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT>
  {
    using Type = PhysicalDeviceRGBA10X6FormatsFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.html
  struct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and never user-settable.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( Bool32 rasterizationOrderColorAttachmentAccess_   = {},
                                                                                      Bool32 rasterizationOrderDepthAttachmentAccess_   = {},
                                                                                      Bool32 rasterizationOrderStencilAttachmentAccess_ = {},
                                                                                      void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , rasterizationOrderColorAttachmentAccess{ rasterizationOrderColorAttachmentAccess_ }
      , rasterizationOrderDepthAttachmentAccess{ rasterizationOrderDepthAttachmentAccess_ }
      , rasterizationOrderStencilAttachmentAccess{ rasterizationOrderStencilAttachmentAccess_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper mirrors the C layout member-for-member.
    PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT &
      operator=( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpret).
    PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT &
      operator=( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT &
      setRasterizationOrderColorAttachmentAccess( Bool32 rasterizationOrderColorAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      rasterizationOrderColorAttachmentAccess = rasterizationOrderColorAttachmentAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT &
      setRasterizationOrderDepthAttachmentAccess( Bool32 rasterizationOrderDepthAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      rasterizationOrderDepthAttachmentAccess = rasterizationOrderDepthAttachmentAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT &
      setRasterizationOrderStencilAttachmentAccess( Bool32 rasterizationOrderStencilAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      rasterizationOrderStencilAttachmentAccess = rasterizationOrderStencilAttachmentAccess_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit, zero-cost views of this object as the native C struct (by reference or pointer),
    // so the wrapper can be passed directly to the C API.
    operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for generic comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(
        sType, pNext, rasterizationOrderColorAttachmentAccess, rasterizationOrderDepthAttachmentAccess, rasterizationOrderStencilAttachmentAccess );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & ) const = default;
#else
    // Memberwise equality; note pNext is compared by pointer value, not by chain contents.
    bool operator==( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rasterizationOrderColorAttachmentAccess == rhs.rasterizationOrderColorAttachmentAccess ) &&
             ( rasterizationOrderDepthAttachmentAccess == rhs.rasterizationOrderDepthAttachmentAccess ) &&
             ( rasterizationOrderStencilAttachmentAccess == rhs.rasterizationOrderStencilAttachmentAccess );
#  endif
    }

    bool operator!=( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType is fixed at construction and has no setter; pNext may point to an extension chain.
    StructureType sType                                     = StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
    void *        pNext                                     = {};
    Bool32        rasterizationOrderColorAttachmentAccess   = {};
    Bool32        rasterizationOrderDepthAttachmentAccess   = {};
    Bool32        rasterizationOrderStencilAttachmentAccess = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (for generic/template code).
  template <>
  struct CppType<VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT>
  {
    using Type = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT>
  {
    using Type = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
  };

  // The ARM-suffixed name is an alias; both extension names refer to the same structure.
  using PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;

  // wrapper struct for struct VkPhysicalDeviceRawAccessChainsFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRawAccessChainsFeaturesNV.html
  struct PhysicalDeviceRawAccessChainsFeaturesNV
  {
    // Layout-compatible alias of the native C struct; enables the zero-cost casts below.
    using NativeType = VkPhysicalDeviceRawAccessChainsFeaturesNV;

    // Static metadata for generic code (e.g. structure chains): the fixed sType for this
    // struct, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceRawAccessChainsFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRawAccessChainsFeaturesNV( Bool32 shaderRawAccessChains_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderRawAccessChains{ shaderRawAccessChains_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRawAccessChainsFeaturesNV( PhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on identical layout of wrapper and native type.
    PhysicalDeviceRawAccessChainsFeaturesNV( VkPhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRawAccessChainsFeaturesNV( *reinterpret_cast<PhysicalDeviceRawAccessChainsFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceRawAccessChainsFeaturesNV & operator=( PhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (always available, even with constructors disabled).
    PhysicalDeviceRawAccessChainsFeaturesNV & operator=( VkPhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceRawAccessChainsFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRawAccessChainsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRawAccessChainsFeaturesNV & setShaderRawAccessChains( Bool32 shaderRawAccessChains_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderRawAccessChains = shaderRawAccessChains_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit, zero-cost views of this object as the native C struct (by reference or pointer).
    operator VkPhysicalDeviceRawAccessChainsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRawAccessChainsFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceRawAccessChainsFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRawAccessChainsFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceRawAccessChainsFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceRawAccessChainsFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceRawAccessChainsFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceRawAccessChainsFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for generic comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderRawAccessChains );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRawAccessChainsFeaturesNV const & ) const = default;
#else
    // Memberwise equality; note pNext is compared by pointer value, not by chain contents.
    bool operator==( PhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderRawAccessChains == rhs.shaderRawAccessChains );
#  endif
    }

    bool operator!=( PhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType is fixed at construction and has no setter; pNext may point to an extension chain.
    StructureType sType                 = StructureType::ePhysicalDeviceRawAccessChainsFeaturesNV;
    void *        pNext                 = {};
    Bool32        shaderRawAccessChains = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (for generic/template code).
  template <>
  struct CppType<VkPhysicalDeviceRawAccessChainsFeaturesNV>
  {
    using Type = PhysicalDeviceRawAccessChainsFeaturesNV;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRawAccessChainsFeaturesNV>
  {
    using Type = PhysicalDeviceRawAccessChainsFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceRayQueryFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayQueryFeaturesKHR.html
  struct PhysicalDeviceRayQueryFeaturesKHR
  {
    // Layout-compatible alias of the native C struct; enables the zero-cost casts below.
    using NativeType = VkPhysicalDeviceRayQueryFeaturesKHR;

    // Static metadata for generic code (e.g. structure chains): the fixed sType for this
    // struct, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceRayQueryFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayQueryFeaturesKHR( Bool32 rayQuery_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , rayQuery{ rayQuery_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayQueryFeaturesKHR( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on identical layout of wrapper and native type.
    PhysicalDeviceRayQueryFeaturesKHR( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRayQueryFeaturesKHR( *reinterpret_cast<PhysicalDeviceRayQueryFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceRayQueryFeaturesKHR & operator=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (always available, even with constructors disabled).
    PhysicalDeviceRayQueryFeaturesKHR & operator=( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceRayQueryFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR & setRayQuery( Bool32 rayQuery_ ) VULKAN_HPP_NOEXCEPT
    {
      rayQuery = rayQuery_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit, zero-cost views of this object as the native C struct (by reference or pointer).
    operator VkPhysicalDeviceRayQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRayQueryFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceRayQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRayQueryFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceRayQueryFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceRayQueryFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceRayQueryFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceRayQueryFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for generic comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, rayQuery );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRayQueryFeaturesKHR const & ) const = default;
#else
    // Memberwise equality; note pNext is compared by pointer value, not by chain contents.
    bool operator==( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayQuery == rhs.rayQuery );
#  endif
    }

    bool operator!=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType is fixed at construction and has no setter; pNext may point to an extension chain.
    StructureType sType    = StructureType::ePhysicalDeviceRayQueryFeaturesKHR;
    void *        pNext    = {};
    Bool32        rayQuery = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (for generic/template code).
  template <>
  struct CppType<VkPhysicalDeviceRayQueryFeaturesKHR>
  {
    using Type = PhysicalDeviceRayQueryFeaturesKHR;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRayQueryFeaturesKHR>
  {
    using Type = PhysicalDeviceRayQueryFeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceRayTracingInvocationReorderFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingInvocationReorderFeaturesEXT.html
  struct PhysicalDeviceRayTracingInvocationReorderFeaturesEXT
  {
    // Layout-compatible alias of the native C struct; enables the zero-cost casts below.
    using NativeType = VkPhysicalDeviceRayTracingInvocationReorderFeaturesEXT;

    // Static metadata for generic code (e.g. structure chains): the fixed sType for this
    // struct, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderFeaturesEXT( Bool32 rayTracingInvocationReorder_ = {},
                                                                               void * pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , rayTracingInvocationReorder{ rayTracingInvocationReorder_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceRayTracingInvocationReorderFeaturesEXT( PhysicalDeviceRayTracingInvocationReorderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on identical layout of wrapper and native type.
    PhysicalDeviceRayTracingInvocationReorderFeaturesEXT( VkPhysicalDeviceRayTracingInvocationReorderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRayTracingInvocationReorderFeaturesEXT( *reinterpret_cast<PhysicalDeviceRayTracingInvocationReorderFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceRayTracingInvocationReorderFeaturesEXT &
      operator=( PhysicalDeviceRayTracingInvocationReorderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (always available, even with constructors disabled).
    PhysicalDeviceRayTracingInvocationReorderFeaturesEXT & operator=( VkPhysicalDeviceRayTracingInvocationReorderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceRayTracingInvocationReorderFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingInvocationReorderFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingInvocationReorderFeaturesEXT &
      setRayTracingInvocationReorder( Bool32 rayTracingInvocationReorder_ ) VULKAN_HPP_NOEXCEPT
    {
      rayTracingInvocationReorder = rayTracingInvocationReorder_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit, zero-cost views of this object as the native C struct (by reference or pointer).
    operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRayTracingInvocationReorderFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRayTracingInvocationReorderFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceRayTracingInvocationReorderFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceRayTracingInvocationReorderFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for generic comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, rayTracingInvocationReorder );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRayTracingInvocationReorderFeaturesEXT const & ) const = default;
#else
    // Memberwise equality; note pNext is compared by pointer value, not by chain contents.
    bool operator==( PhysicalDeviceRayTracingInvocationReorderFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingInvocationReorder == rhs.rayTracingInvocationReorder );
#  endif
    }

    bool operator!=( PhysicalDeviceRayTracingInvocationReorderFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType is fixed at construction and has no setter; pNext may point to an extension chain.
    StructureType sType                       = StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesEXT;
    void *        pNext                       = {};
    Bool32        rayTracingInvocationReorder = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (for generic/template code).
  template <>
  struct CppType<VkPhysicalDeviceRayTracingInvocationReorderFeaturesEXT>
  {
    using Type = PhysicalDeviceRayTracingInvocationReorderFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesEXT>
  {
    using Type = PhysicalDeviceRayTracingInvocationReorderFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV.html
  struct PhysicalDeviceRayTracingInvocationReorderFeaturesNV
  {
    // Layout-compatible alias of the native C struct; enables the zero-cost casts below.
    using NativeType = VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV;

    // Static metadata for generic code (e.g. structure chains): the fixed sType for this
    // struct, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderFeaturesNV( Bool32 rayTracingInvocationReorder_ = {},
                                                                              void * pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , rayTracingInvocationReorder{ rayTracingInvocationReorder_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceRayTracingInvocationReorderFeaturesNV( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on identical layout of wrapper and native type.
    PhysicalDeviceRayTracingInvocationReorderFeaturesNV( VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRayTracingInvocationReorderFeaturesNV( *reinterpret_cast<PhysicalDeviceRayTracingInvocationReorderFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceRayTracingInvocationReorderFeaturesNV &
      operator=( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (always available, even with constructors disabled).
    PhysicalDeviceRayTracingInvocationReorderFeaturesNV & operator=( VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceRayTracingInvocationReorderFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingInvocationReorderFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingInvocationReorderFeaturesNV &
      setRayTracingInvocationReorder( Bool32 rayTracingInvocationReorder_ ) VULKAN_HPP_NOEXCEPT
    {
      rayTracingInvocationReorder = rayTracingInvocationReorder_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit, zero-cost views of this object as the native C struct (by reference or pointer).
    operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for generic comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, rayTracingInvocationReorder );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & ) const = default;
#else
    // Memberwise equality; note pNext is compared by pointer value, not by chain contents.
    bool operator==( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingInvocationReorder == rhs.rayTracingInvocationReorder );
#  endif
    }

    bool operator!=( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType is fixed at construction and has no setter; pNext may point to an extension chain.
    StructureType sType                       = StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesNV;
    void *        pNext                       = {};
    Bool32        rayTracingInvocationReorder = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (for generic/template code).
  template <>
  struct CppType<VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV>
  {
    using Type = PhysicalDeviceRayTracingInvocationReorderFeaturesNV;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesNV>
  {
    using Type = PhysicalDeviceRayTracingInvocationReorderFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceRayTracingInvocationReorderPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingInvocationReorderPropertiesEXT.html
  struct PhysicalDeviceRayTracingInvocationReorderPropertiesEXT
  {
    // Layout-compatible alias of the native C struct; enables the zero-cost casts below.
    using NativeType = VkPhysicalDeviceRayTracingInvocationReorderPropertiesEXT;

    // Static metadata for generic code (e.g. structure chains): the fixed sType for this
    // struct, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by its member initializer.
    // Note: unlike the Features structs, no setters are generated for this struct.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderPropertiesEXT(
      RayTracingInvocationReorderModeEXT rayTracingInvocationReorderReorderingHint_ = RayTracingInvocationReorderModeEXT::eNone,
      uint32_t                           maxShaderBindingTableRecordIndex_          = {},
      void *                             pNext_                                     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , rayTracingInvocationReorderReorderingHint{ rayTracingInvocationReorderReorderingHint_ }
      , maxShaderBindingTableRecordIndex{ maxShaderBindingTableRecordIndex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderPropertiesEXT( PhysicalDeviceRayTracingInvocationReorderPropertiesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on identical layout of wrapper and native type.
    PhysicalDeviceRayTracingInvocationReorderPropertiesEXT( VkPhysicalDeviceRayTracingInvocationReorderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRayTracingInvocationReorderPropertiesEXT( *reinterpret_cast<PhysicalDeviceRayTracingInvocationReorderPropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceRayTracingInvocationReorderPropertiesEXT &
      operator=( PhysicalDeviceRayTracingInvocationReorderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (always available, even with constructors disabled).
    PhysicalDeviceRayTracingInvocationReorderPropertiesEXT &
      operator=( VkPhysicalDeviceRayTracingInvocationReorderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceRayTracingInvocationReorderPropertiesEXT const *>( &rhs );
      return *this;
    }

    // Implicit, zero-cost views of this object as the native C struct (by reference or pointer).
    operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRayTracingInvocationReorderPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRayTracingInvocationReorderPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceRayTracingInvocationReorderPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceRayTracingInvocationReorderPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for generic comparison.
    std::tuple<StructureType const &, void * const &, RayTracingInvocationReorderModeEXT const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, rayTracingInvocationReorderReorderingHint, maxShaderBindingTableRecordIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRayTracingInvocationReorderPropertiesEXT const & ) const = default;
#else
    // Memberwise equality; note pNext is compared by pointer value, not by chain contents.
    bool operator==( PhysicalDeviceRayTracingInvocationReorderPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( rayTracingInvocationReorderReorderingHint == rhs.rayTracingInvocationReorderReorderingHint ) &&
             ( maxShaderBindingTableRecordIndex == rhs.maxShaderBindingTableRecordIndex );
#  endif
    }

    bool operator!=( PhysicalDeviceRayTracingInvocationReorderPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType is fixed at construction; pNext may point to an extension chain.
    StructureType                      sType                                     = StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesEXT;
    void *                             pNext                                     = {};
    RayTracingInvocationReorderModeEXT rayTracingInvocationReorderReorderingHint = RayTracingInvocationReorderModeEXT::eNone;
    uint32_t                           maxShaderBindingTableRecordIndex          = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper type (for generic/template code).
  template <>
  struct CppType<VkPhysicalDeviceRayTracingInvocationReorderPropertiesEXT>
  {
    using Type = PhysicalDeviceRayTracingInvocationReorderPropertiesEXT;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesEXT>
  {
    using Type = PhysicalDeviceRayTracingInvocationReorderPropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV.html
  struct PhysicalDeviceRayTracingInvocationReorderPropertiesNV
  {
    // Layout-compatible alias of the native C struct; enables the zero-cost casts below.
    using NativeType = VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV;

    // Static metadata for generic code (e.g. structure chains): the fixed sType for this
    // struct, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by its member initializer.
    // Note: unlike the Features structs, no setters are generated for this struct.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderPropertiesNV(
      RayTracingInvocationReorderModeEXT rayTracingInvocationReorderReorderingHint_ = RayTracingInvocationReorderModeEXT::eNone,
      void *                             pNext_                                     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , rayTracingInvocationReorderReorderingHint{ rayTracingInvocationReorderReorderingHint_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceRayTracingInvocationReorderPropertiesNV( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on identical layout of wrapper and native type.
    PhysicalDeviceRayTracingInvocationReorderPropertiesNV( VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRayTracingInvocationReorderPropertiesNV( *reinterpret_cast<PhysicalDeviceRayTracingInvocationReorderPropertiesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceRayTracingInvocationReorderPropertiesNV &
      operator=( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (always available, even with constructors disabled).
    PhysicalDeviceRayTracingInvocationReorderPropertiesNV & operator=( VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceRayTracingInvocationReorderPropertiesNV const *>( &rhs );
      return *this;
    }

    // Implicit, zero-cost views of this object as the native C struct (by reference or pointer).
    operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for generic comparison.
    std::tuple<StructureType const &, void * const &, RayTracingInvocationReorderModeEXT const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, rayTracingInvocationReorderReorderingHint );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & ) const = default;
#else
    // Memberwise equality; note pNext is compared by pointer value, not by chain contents.
    bool operator==( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingInvocationReorderReorderingHint == rhs.rayTracingInvocationReorderReorderingHint );
#  endif
    }

    bool operator!=( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType is fixed at construction; pNext may point to an extension chain.
    // Note: the NV struct reuses the EXT-suffixed reorder-mode enum type.
    StructureType                      sType                                     = StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV;
    void *                             pNext                                     = {};
    RayTracingInvocationReorderModeEXT rayTracingInvocationReorderReorderingHint = RayTracingInvocationReorderModeEXT::eNone;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV>
  {
    using Type = PhysicalDeviceRayTracingInvocationReorderPropertiesNV;
  };
#endif

  // Maps the StructureType enumerant to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV>
  {
    using Type = PhysicalDeviceRayTracingInvocationReorderPropertiesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV.html
  struct PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV
  {
    using NativeType = VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV;

    // This feature struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and is deliberately not a parameter.
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV( Bool32 spheres_ = {}, Bool32 linearSweptSpheres_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , spheres{ spheres_ }
      , linearSweptSpheres{ linearSweptSpheres_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV( PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-compatible with NativeType.
    PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV( VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV( *reinterpret_cast<PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV &
      operator=( PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV & operator=( VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV & setSpheres( Bool32 spheres_ ) VULKAN_HPP_NOEXCEPT
    {
      spheres = spheres_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV & setLinearSweptSpheres( Bool32 linearSweptSpheres_ ) VULKAN_HPP_NOEXCEPT
    {
      linearSweptSpheres = linearSweptSpheres_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by const/non-const reference and pointer.
    operator VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, spheres, linearSweptSpheres );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & ) const = default;
#else
    // Member-wise equality; note pNext is compared as a raw pointer, not deeply.
    bool operator==( PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spheres == rhs.spheres ) && ( linearSweptSpheres == rhs.linearSweptSpheres );
#  endif
    }

    bool operator!=( PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV exactly; do not reorder.
    StructureType sType              = StructureType::ePhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV;
    void *        pNext              = {};
    Bool32        spheres            = {};
    Bool32        linearSweptSpheres = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV>
  {
    using Type = PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV;
  };
#endif

  // Maps the StructureType enumerant to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV>
  {
    using Type = PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR.html
  struct PhysicalDeviceRayTracingMaintenance1FeaturesKHR
  {
    using NativeType = VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR;

    // This feature struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and is deliberately not a parameter.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMaintenance1FeaturesKHR( Bool32 rayTracingMaintenance1_               = {},
                                                                          Bool32 rayTracingPipelineTraceRaysIndirect2_ = {},
                                                                          void * pNext_                                = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , rayTracingMaintenance1{ rayTracingMaintenance1_ }
      , rayTracingPipelineTraceRaysIndirect2{ rayTracingPipelineTraceRaysIndirect2_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceRayTracingMaintenance1FeaturesKHR( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-compatible with NativeType.
    PhysicalDeviceRayTracingMaintenance1FeaturesKHR( VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRayTracingMaintenance1FeaturesKHR( *reinterpret_cast<PhysicalDeviceRayTracingMaintenance1FeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceRayTracingMaintenance1FeaturesKHR & operator=( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    PhysicalDeviceRayTracingMaintenance1FeaturesKHR & operator=( VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceRayTracingMaintenance1FeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR & setRayTracingMaintenance1( Bool32 rayTracingMaintenance1_ ) VULKAN_HPP_NOEXCEPT
    {
      rayTracingMaintenance1 = rayTracingMaintenance1_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR &
      setRayTracingPipelineTraceRaysIndirect2( Bool32 rayTracingPipelineTraceRaysIndirect2_ ) VULKAN_HPP_NOEXCEPT
    {
      rayTracingPipelineTraceRaysIndirect2 = rayTracingPipelineTraceRaysIndirect2_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by const/non-const reference and pointer.
    operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, rayTracingMaintenance1, rayTracingPipelineTraceRaysIndirect2 );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & ) const = default;
#else
    // Member-wise equality; note pNext is compared as a raw pointer, not deeply.
    bool operator==( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingMaintenance1 == rhs.rayTracingMaintenance1 ) &&
             ( rayTracingPipelineTraceRaysIndirect2 == rhs.rayTracingPipelineTraceRaysIndirect2 );
#  endif
    }

    bool operator!=( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR exactly; do not reorder.
    StructureType sType                                = StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR;
    void *        pNext                                = {};
    Bool32        rayTracingMaintenance1               = {};
    Bool32        rayTracingPipelineTraceRaysIndirect2 = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR>
  {
    using Type = PhysicalDeviceRayTracingMaintenance1FeaturesKHR;
  };
#endif

  // Maps the StructureType enumerant to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR>
  {
    using Type = PhysicalDeviceRayTracingMaintenance1FeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceRayTracingMotionBlurFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingMotionBlurFeaturesNV.html
  struct PhysicalDeviceRayTracingMotionBlurFeaturesNV
  {
    using NativeType = VkPhysicalDeviceRayTracingMotionBlurFeaturesNV;

    // This feature struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and is deliberately not a parameter.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMotionBlurFeaturesNV( Bool32 rayTracingMotionBlur_                          = {},
                                                                       Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect_ = {},
                                                                       void * pNext_                                         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , rayTracingMotionBlur{ rayTracingMotionBlur_ }
      , rayTracingMotionBlurPipelineTraceRaysIndirect{ rayTracingMotionBlurPipelineTraceRaysIndirect_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMotionBlurFeaturesNV( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-compatible with NativeType.
    PhysicalDeviceRayTracingMotionBlurFeaturesNV( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRayTracingMotionBlurFeaturesNV( *reinterpret_cast<PhysicalDeviceRayTracingMotionBlurFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceRayTracingMotionBlurFeaturesNV & operator=( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    PhysicalDeviceRayTracingMotionBlurFeaturesNV & operator=( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceRayTracingMotionBlurFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & setRayTracingMotionBlur( Bool32 rayTracingMotionBlur_ ) VULKAN_HPP_NOEXCEPT
    {
      rayTracingMotionBlur = rayTracingMotionBlur_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV &
      setRayTracingMotionBlurPipelineTraceRaysIndirect( Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT
    {
      rayTracingMotionBlurPipelineTraceRaysIndirect = rayTracingMotionBlurPipelineTraceRaysIndirect_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by const/non-const reference and pointer.
    operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRayTracingMotionBlurFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRayTracingMotionBlurFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceRayTracingMotionBlurFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceRayTracingMotionBlurFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, rayTracingMotionBlur, rayTracingMotionBlurPipelineTraceRaysIndirect );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & ) const = default;
#else
    // Member-wise equality; note pNext is compared as a raw pointer, not deeply.
    bool operator==( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingMotionBlur == rhs.rayTracingMotionBlur ) &&
             ( rayTracingMotionBlurPipelineTraceRaysIndirect == rhs.rayTracingMotionBlurPipelineTraceRaysIndirect );
#  endif
    }

    bool operator!=( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkPhysicalDeviceRayTracingMotionBlurFeaturesNV exactly; do not reorder.
    StructureType sType                                         = StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV;
    void *        pNext                                         = {};
    Bool32        rayTracingMotionBlur                          = {};
    Bool32        rayTracingMotionBlurPipelineTraceRaysIndirect = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceRayTracingMotionBlurFeaturesNV>
  {
    using Type = PhysicalDeviceRayTracingMotionBlurFeaturesNV;
  };
#endif

  // Maps the StructureType enumerant to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV>
  {
    using Type = PhysicalDeviceRayTracingMotionBlurFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceRayTracingPipelineFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingPipelineFeaturesKHR.html
  struct PhysicalDeviceRayTracingPipelineFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceRayTracingPipelineFeaturesKHR;

    // This feature struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and is deliberately not a parameter.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR( Bool32 rayTracingPipeline_                                    = {},
                                                                      Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_      = {},
                                                                      Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ = {},
                                                                      Bool32 rayTracingPipelineTraceRaysIndirect_                   = {},
                                                                      Bool32 rayTraversalPrimitiveCulling_                          = {},
                                                                      void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , rayTracingPipeline{ rayTracingPipeline_ }
      , rayTracingPipelineShaderGroupHandleCaptureReplay{ rayTracingPipelineShaderGroupHandleCaptureReplay_ }
      , rayTracingPipelineShaderGroupHandleCaptureReplayMixed{ rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ }
      , rayTracingPipelineTraceRaysIndirect{ rayTracingPipelineTraceRaysIndirect_ }
      , rayTraversalPrimitiveCulling{ rayTraversalPrimitiveCulling_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-compatible with NativeType.
    PhysicalDeviceRayTracingPipelineFeaturesKHR( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRayTracingPipelineFeaturesKHR( *reinterpret_cast<PhysicalDeviceRayTracingPipelineFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceRayTracingPipelineFeaturesKHR & operator=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    PhysicalDeviceRayTracingPipelineFeaturesKHR & operator=( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceRayTracingPipelineFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipeline( Bool32 rayTracingPipeline_ ) VULKAN_HPP_NOEXCEPT
    {
      rayTracingPipeline = rayTracingPipeline_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR &
      setRayTracingPipelineShaderGroupHandleCaptureReplay( Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
    {
      rayTracingPipelineShaderGroupHandleCaptureReplay = rayTracingPipelineShaderGroupHandleCaptureReplay_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR &
      setRayTracingPipelineShaderGroupHandleCaptureReplayMixed( Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ ) VULKAN_HPP_NOEXCEPT
    {
      rayTracingPipelineShaderGroupHandleCaptureReplayMixed = rayTracingPipelineShaderGroupHandleCaptureReplayMixed_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR &
      setRayTracingPipelineTraceRaysIndirect( Bool32 rayTracingPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT
    {
      rayTracingPipelineTraceRaysIndirect = rayTracingPipelineTraceRaysIndirect_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR &
      setRayTraversalPrimitiveCulling( Bool32 rayTraversalPrimitiveCulling_ ) VULKAN_HPP_NOEXCEPT
    {
      rayTraversalPrimitiveCulling = rayTraversalPrimitiveCulling_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by const/non-const reference and pointer.
    operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRayTracingPipelineFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRayTracingPipelineFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceRayTracingPipelineFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceRayTracingPipelineFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       rayTracingPipeline,
                       rayTracingPipelineShaderGroupHandleCaptureReplay,
                       rayTracingPipelineShaderGroupHandleCaptureReplayMixed,
                       rayTracingPipelineTraceRaysIndirect,
                       rayTraversalPrimitiveCulling );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRayTracingPipelineFeaturesKHR const & ) const = default;
#else
    // Member-wise equality; note pNext is compared as a raw pointer, not deeply.
    bool operator==( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingPipeline == rhs.rayTracingPipeline ) &&
             ( rayTracingPipelineShaderGroupHandleCaptureReplay == rhs.rayTracingPipelineShaderGroupHandleCaptureReplay ) &&
             ( rayTracingPipelineShaderGroupHandleCaptureReplayMixed == rhs.rayTracingPipelineShaderGroupHandleCaptureReplayMixed ) &&
             ( rayTracingPipelineTraceRaysIndirect == rhs.rayTracingPipelineTraceRaysIndirect ) &&
             ( rayTraversalPrimitiveCulling == rhs.rayTraversalPrimitiveCulling );
#  endif
    }

    bool operator!=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkPhysicalDeviceRayTracingPipelineFeaturesKHR exactly; do not reorder.
    StructureType sType                                                 = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR;
    void *        pNext                                                 = {};
    Bool32        rayTracingPipeline                                    = {};
    Bool32        rayTracingPipelineShaderGroupHandleCaptureReplay      = {};
    Bool32        rayTracingPipelineShaderGroupHandleCaptureReplayMixed = {};
    Bool32        rayTracingPipelineTraceRaysIndirect                   = {};
    Bool32        rayTraversalPrimitiveCulling                          = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceRayTracingPipelineFeaturesKHR>
  {
    using Type = PhysicalDeviceRayTracingPipelineFeaturesKHR;
  };
#endif

  // Maps the StructureType enumerant to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR>
  {
    using Type = PhysicalDeviceRayTracingPipelineFeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceRayTracingPipelinePropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingPipelinePropertiesKHR.html
  struct PhysicalDeviceRayTracingPipelinePropertiesKHR
  {
    using NativeType = VkPhysicalDeviceRayTracingPipelinePropertiesKHR;

    // This properties struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and is deliberately not a parameter.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelinePropertiesKHR( uint32_t shaderGroupHandleSize_              = {},
                                                                        uint32_t maxRayRecursionDepth_               = {},
                                                                        uint32_t maxShaderGroupStride_               = {},
                                                                        uint32_t shaderGroupBaseAlignment_           = {},
                                                                        uint32_t shaderGroupHandleCaptureReplaySize_ = {},
                                                                        uint32_t maxRayDispatchInvocationCount_      = {},
                                                                        uint32_t shaderGroupHandleAlignment_         = {},
                                                                        uint32_t maxRayHitAttributeSize_             = {},
                                                                        void *   pNext_                              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderGroupHandleSize{ shaderGroupHandleSize_ }
      , maxRayRecursionDepth{ maxRayRecursionDepth_ }
      , maxShaderGroupStride{ maxShaderGroupStride_ }
      , shaderGroupBaseAlignment{ shaderGroupBaseAlignment_ }
      , shaderGroupHandleCaptureReplaySize{ shaderGroupHandleCaptureReplaySize_ }
      , maxRayDispatchInvocationCount{ maxRayDispatchInvocationCount_ }
      , shaderGroupHandleAlignment{ shaderGroupHandleAlignment_ }
      , maxRayHitAttributeSize{ maxRayHitAttributeSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceRayTracingPipelinePropertiesKHR( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-compatible with NativeType.
    PhysicalDeviceRayTracingPipelinePropertiesKHR( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRayTracingPipelinePropertiesKHR( *reinterpret_cast<PhysicalDeviceRayTracingPipelinePropertiesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceRayTracingPipelinePropertiesKHR & operator=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    // No setters are generated for this struct: it carries device-reported properties, which are read, not written, by applications.
    PhysicalDeviceRayTracingPipelinePropertiesKHR & operator=( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceRayTracingPipelinePropertiesKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C struct, by const/non-const reference and pointer.
    operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRayTracingPipelinePropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRayTracingPipelinePropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceRayTracingPipelinePropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceRayTracingPipelinePropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for reflection-based comparison below.
    std::tuple<StructureType const &,
               void * const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       shaderGroupHandleSize,
                       maxRayRecursionDepth,
                       maxShaderGroupStride,
                       shaderGroupBaseAlignment,
                       shaderGroupHandleCaptureReplaySize,
                       maxRayDispatchInvocationCount,
                       shaderGroupHandleAlignment,
                       maxRayHitAttributeSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRayTracingPipelinePropertiesKHR const & ) const = default;
#else
    // Member-wise equality; note pNext is compared as a raw pointer, not deeply.
    bool operator==( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) &&
             ( maxRayRecursionDepth == rhs.maxRayRecursionDepth ) && ( maxShaderGroupStride == rhs.maxShaderGroupStride ) &&
             ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) && ( shaderGroupHandleCaptureReplaySize == rhs.shaderGroupHandleCaptureReplaySize ) &&
             ( maxRayDispatchInvocationCount == rhs.maxRayDispatchInvocationCount ) && ( shaderGroupHandleAlignment == rhs.shaderGroupHandleAlignment ) &&
             ( maxRayHitAttributeSize == rhs.maxRayHitAttributeSize );
#  endif
    }

    bool operator!=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkPhysicalDeviceRayTracingPipelinePropertiesKHR exactly; do not reorder.
    StructureType sType                              = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR;
    void *        pNext                              = {};
    uint32_t      shaderGroupHandleSize              = {};
    uint32_t      maxRayRecursionDepth               = {};
    uint32_t      maxShaderGroupStride               = {};
    uint32_t      shaderGroupBaseAlignment           = {};
    uint32_t      shaderGroupHandleCaptureReplaySize = {};
    uint32_t      maxRayDispatchInvocationCount      = {};
    uint32_t      shaderGroupHandleAlignment         = {};
    uint32_t      maxRayHitAttributeSize             = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper (only available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceRayTracingPipelinePropertiesKHR>
  {
    using Type = PhysicalDeviceRayTracingPipelinePropertiesKHR;
  };
#endif

  // Maps the StructureType enum value to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR>
  {
    using Type = PhysicalDeviceRayTracingPipelinePropertiesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR.html
  struct PhysicalDeviceRayTracingPositionFetchFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR;

    // structureType is the sType value identifying this struct; allowDuplicate is
    // consumed by structure-chain validation code elsewhere in vulkan.hpp (not shown here).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceRayTracingPositionFetchFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: all members default to zero/null; pNext comes last so the
    // feature flag can be set positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPositionFetchFeaturesKHR( Bool32 rayTracingPositionFetch_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , rayTracingPositionFetch{ rayTracingPositionFetch_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceRayTracingPositionFetchFeaturesKHR( PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct: relies on this wrapper being layout-compatible
    // with VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR.
    PhysicalDeviceRayTracingPositionFetchFeaturesKHR( VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRayTracingPositionFetchFeaturesKHR( *reinterpret_cast<PhysicalDeviceRayTracingPositionFetchFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceRayTracingPositionFetchFeaturesKHR & operator=( PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, again via layout-compatible reinterpretation.
    PhysicalDeviceRayTracingPositionFetchFeaturesKHR & operator=( VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceRayTracingPositionFetchFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPositionFetchFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPositionFetchFeaturesKHR & setRayTracingPositionFetch( Bool32 rayTracingPositionFetch_ ) VULKAN_HPP_NOEXCEPT
    {
      rayTracingPositionFetch = rayTracingPositionFetch_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (reference and pointer forms) so the
    // wrapper can be passed directly to C API entry points.
    operator VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to every member, used by operator== below when
    // reflection-based comparison is enabled.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, rayTracingPositionFetch );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & ) const = default;
#else
    // Memberwise equality; note pNext is compared as a raw pointer, not deep-compared.
    bool operator==( PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingPositionFetch == rhs.rayTracingPositionFetch );
#  endif
    }

    bool operator!=( PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType is fixed to this struct's StructureType; all other members default to zero/null.
    StructureType sType                   = StructureType::ePhysicalDeviceRayTracingPositionFetchFeaturesKHR;
    void *        pNext                   = {};
    Bool32        rayTracingPositionFetch = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper (only available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR>
  {
    using Type = PhysicalDeviceRayTracingPositionFetchFeaturesKHR;
  };
#endif

  // Maps the StructureType enum value to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPositionFetchFeaturesKHR>
  {
    using Type = PhysicalDeviceRayTracingPositionFetchFeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceRayTracingPropertiesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingPropertiesNV.html
  struct PhysicalDeviceRayTracingPropertiesNV
  {
    using NativeType = VkPhysicalDeviceRayTracingPropertiesNV;

    // structureType is the sType value identifying this struct; allowDuplicate is
    // consumed by structure-chain validation code elsewhere in vulkan.hpp (not shown here).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceRayTracingPropertiesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: all members default to zero/null; pNext comes last so the
    // property fields can be set positionally. (Properties structs are normally
    // filled in by the implementation, hence no setters are generated below.)
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( uint32_t shaderGroupHandleSize_                  = {},
                                                               uint32_t maxRecursionDepth_                      = {},
                                                               uint32_t maxShaderGroupStride_                   = {},
                                                               uint32_t shaderGroupBaseAlignment_               = {},
                                                               uint64_t maxGeometryCount_                       = {},
                                                               uint64_t maxInstanceCount_                       = {},
                                                               uint64_t maxTriangleCount_                       = {},
                                                               uint32_t maxDescriptorSetAccelerationStructures_ = {},
                                                               void *   pNext_                                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderGroupHandleSize{ shaderGroupHandleSize_ }
      , maxRecursionDepth{ maxRecursionDepth_ }
      , maxShaderGroupStride{ maxShaderGroupStride_ }
      , shaderGroupBaseAlignment{ shaderGroupBaseAlignment_ }
      , maxGeometryCount{ maxGeometryCount_ }
      , maxInstanceCount{ maxInstanceCount_ }
      , maxTriangleCount{ maxTriangleCount_ }
      , maxDescriptorSetAccelerationStructures{ maxDescriptorSetAccelerationStructures_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct: relies on this wrapper being layout-compatible
    // with VkPhysicalDeviceRayTracingPropertiesNV.
    PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRayTracingPropertiesNV( *reinterpret_cast<PhysicalDeviceRayTracingPropertiesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceRayTracingPropertiesNV & operator=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, again via layout-compatible reinterpretation.
    PhysicalDeviceRayTracingPropertiesNV & operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceRayTracingPropertiesNV const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C struct (reference and pointer forms) so the
    // wrapper can be passed directly to C API entry points.
    operator VkPhysicalDeviceRayTracingPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRayTracingPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceRayTracingPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceRayTracingPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceRayTracingPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceRayTracingPropertiesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to every member, used by operator== below when
    // reflection-based comparison is enabled.
    std::tuple<StructureType const &,
               void * const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint64_t const &,
               uint64_t const &,
               uint64_t const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       shaderGroupHandleSize,
                       maxRecursionDepth,
                       maxShaderGroupStride,
                       shaderGroupBaseAlignment,
                       maxGeometryCount,
                       maxInstanceCount,
                       maxTriangleCount,
                       maxDescriptorSetAccelerationStructures );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRayTracingPropertiesNV const & ) const = default;
#else
    // Memberwise equality; note pNext is compared as a raw pointer, not deep-compared.
    bool operator==( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) &&
             ( maxRecursionDepth == rhs.maxRecursionDepth ) && ( maxShaderGroupStride == rhs.maxShaderGroupStride ) &&
             ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) && ( maxGeometryCount == rhs.maxGeometryCount ) &&
             ( maxInstanceCount == rhs.maxInstanceCount ) && ( maxTriangleCount == rhs.maxTriangleCount ) &&
             ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures );
#  endif
    }

    bool operator!=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType is fixed to this struct's StructureType; all other members default to zero/null.
    StructureType sType                                  = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
    void *        pNext                                  = {};
    uint32_t      shaderGroupHandleSize                  = {};
    uint32_t      maxRecursionDepth                      = {};
    uint32_t      maxShaderGroupStride                   = {};
    uint32_t      shaderGroupBaseAlignment               = {};
    uint64_t      maxGeometryCount                       = {};
    uint64_t      maxInstanceCount                       = {};
    uint64_t      maxTriangleCount                       = {};
    uint32_t      maxDescriptorSetAccelerationStructures = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper (only available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceRayTracingPropertiesNV>
  {
    using Type = PhysicalDeviceRayTracingPropertiesNV;
  };
#endif

  // Maps the StructureType enum value to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPropertiesNV>
  {
    using Type = PhysicalDeviceRayTracingPropertiesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceRayTracingValidationFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingValidationFeaturesNV.html
  struct PhysicalDeviceRayTracingValidationFeaturesNV
  {
    using NativeType = VkPhysicalDeviceRayTracingValidationFeaturesNV;

    // structureType is the sType value identifying this struct; allowDuplicate is
    // consumed by structure-chain validation code elsewhere in vulkan.hpp (not shown here).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceRayTracingValidationFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: all members default to zero/null; pNext comes last so the
    // feature flag can be set positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingValidationFeaturesNV( Bool32 rayTracingValidation_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , rayTracingValidation{ rayTracingValidation_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingValidationFeaturesNV( PhysicalDeviceRayTracingValidationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct: relies on this wrapper being layout-compatible
    // with VkPhysicalDeviceRayTracingValidationFeaturesNV.
    PhysicalDeviceRayTracingValidationFeaturesNV( VkPhysicalDeviceRayTracingValidationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRayTracingValidationFeaturesNV( *reinterpret_cast<PhysicalDeviceRayTracingValidationFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceRayTracingValidationFeaturesNV & operator=( PhysicalDeviceRayTracingValidationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, again via layout-compatible reinterpretation.
    PhysicalDeviceRayTracingValidationFeaturesNV & operator=( VkPhysicalDeviceRayTracingValidationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceRayTracingValidationFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingValidationFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingValidationFeaturesNV & setRayTracingValidation( Bool32 rayTracingValidation_ ) VULKAN_HPP_NOEXCEPT
    {
      rayTracingValidation = rayTracingValidation_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (reference and pointer forms) so the
    // wrapper can be passed directly to C API entry points.
    operator VkPhysicalDeviceRayTracingValidationFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRayTracingValidationFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceRayTracingValidationFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRayTracingValidationFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceRayTracingValidationFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceRayTracingValidationFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceRayTracingValidationFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceRayTracingValidationFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to every member, used by operator== below when
    // reflection-based comparison is enabled.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, rayTracingValidation );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRayTracingValidationFeaturesNV const & ) const = default;
#else
    // Memberwise equality; note pNext is compared as a raw pointer, not deep-compared.
    bool operator==( PhysicalDeviceRayTracingValidationFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingValidation == rhs.rayTracingValidation );
#  endif
    }

    bool operator!=( PhysicalDeviceRayTracingValidationFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType is fixed to this struct's StructureType; all other members default to zero/null.
    StructureType sType                = StructureType::ePhysicalDeviceRayTracingValidationFeaturesNV;
    void *        pNext                = {};
    Bool32        rayTracingValidation = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper (only available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceRayTracingValidationFeaturesNV>
  {
    using Type = PhysicalDeviceRayTracingValidationFeaturesNV;
  };
#endif

  // Maps the StructureType enum value to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingValidationFeaturesNV>
  {
    using Type = PhysicalDeviceRayTracingValidationFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG.html
  struct PhysicalDeviceRelaxedLineRasterizationFeaturesIMG
  {
    using NativeType = VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG;

    // structureType is the sType value identifying this struct; allowDuplicate is
    // consumed by structure-chain validation code elsewhere in vulkan.hpp (not shown here).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceRelaxedLineRasterizationFeaturesIMG;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: all members default to zero/null; pNext comes last so the
    // feature flag can be set positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRelaxedLineRasterizationFeaturesIMG( Bool32 relaxedLineRasterization_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , relaxedLineRasterization{ relaxedLineRasterization_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceRelaxedLineRasterizationFeaturesIMG( PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct: relies on this wrapper being layout-compatible
    // with VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG.
    PhysicalDeviceRelaxedLineRasterizationFeaturesIMG( VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRelaxedLineRasterizationFeaturesIMG( *reinterpret_cast<PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const *>( &rhs ) )
    {
    }

    PhysicalDeviceRelaxedLineRasterizationFeaturesIMG &
      operator=( PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, again via layout-compatible reinterpretation.
    PhysicalDeviceRelaxedLineRasterizationFeaturesIMG & operator=( VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRelaxedLineRasterizationFeaturesIMG & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRelaxedLineRasterizationFeaturesIMG &
      setRelaxedLineRasterization( Bool32 relaxedLineRasterization_ ) VULKAN_HPP_NOEXCEPT
    {
      relaxedLineRasterization = relaxedLineRasterization_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (reference and pointer forms) so the
    // wrapper can be passed directly to C API entry points.
    operator VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG *>( this );
    }

    operator VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG *>( this );
    }

    operator VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG *>( this );
    }

    operator VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to every member, used by operator== below when
    // reflection-based comparison is enabled.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, relaxedLineRasterization );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & ) const = default;
#else
    // Memberwise equality; note pNext is compared as a raw pointer, not deep-compared.
    bool operator==( PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( relaxedLineRasterization == rhs.relaxedLineRasterization );
#  endif
    }

    bool operator!=( PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType is fixed to this struct's StructureType; all other members default to zero/null.
    StructureType sType                    = StructureType::ePhysicalDeviceRelaxedLineRasterizationFeaturesIMG;
    void *        pNext                    = {};
    Bool32        relaxedLineRasterization = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper (only available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG>
  {
    using Type = PhysicalDeviceRelaxedLineRasterizationFeaturesIMG;
  };
#endif

  // Maps the StructureType enum value to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRelaxedLineRasterizationFeaturesIMG>
  {
    using Type = PhysicalDeviceRelaxedLineRasterizationFeaturesIMG;
  };

  // wrapper struct for struct VkPhysicalDeviceRenderPassStripedFeaturesARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRenderPassStripedFeaturesARM.html
  struct PhysicalDeviceRenderPassStripedFeaturesARM
  {
    using NativeType = VkPhysicalDeviceRenderPassStripedFeaturesARM;

    // structureType is the sType value identifying this struct; allowDuplicate is
    // consumed by structure-chain validation code elsewhere in vulkan.hpp (not shown here).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceRenderPassStripedFeaturesARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: all members default to zero/null; pNext comes last so the
    // feature flag can be set positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRenderPassStripedFeaturesARM( Bool32 renderPassStriped_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , renderPassStriped{ renderPassStriped_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRenderPassStripedFeaturesARM( PhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct: relies on this wrapper being layout-compatible
    // with VkPhysicalDeviceRenderPassStripedFeaturesARM.
    PhysicalDeviceRenderPassStripedFeaturesARM( VkPhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRenderPassStripedFeaturesARM( *reinterpret_cast<PhysicalDeviceRenderPassStripedFeaturesARM const *>( &rhs ) )
    {
    }

    PhysicalDeviceRenderPassStripedFeaturesARM & operator=( PhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, again via layout-compatible reinterpretation.
    PhysicalDeviceRenderPassStripedFeaturesARM & operator=( VkPhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceRenderPassStripedFeaturesARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRenderPassStripedFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRenderPassStripedFeaturesARM & setRenderPassStriped( Bool32 renderPassStriped_ ) VULKAN_HPP_NOEXCEPT
    {
      renderPassStriped = renderPassStriped_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (reference and pointer forms) so the
    // wrapper can be passed directly to C API entry points.
    operator VkPhysicalDeviceRenderPassStripedFeaturesARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRenderPassStripedFeaturesARM *>( this );
    }

    operator VkPhysicalDeviceRenderPassStripedFeaturesARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRenderPassStripedFeaturesARM *>( this );
    }

    operator VkPhysicalDeviceRenderPassStripedFeaturesARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceRenderPassStripedFeaturesARM *>( this );
    }

    operator VkPhysicalDeviceRenderPassStripedFeaturesARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceRenderPassStripedFeaturesARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to every member, used by operator== below when
    // reflection-based comparison is enabled.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, renderPassStriped );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRenderPassStripedFeaturesARM const & ) const = default;
#else
    // Memberwise equality; note pNext is compared as a raw pointer, not deep-compared.
    bool operator==( PhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPassStriped == rhs.renderPassStriped );
#  endif
    }

    bool operator!=( PhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType is fixed to this struct's StructureType; all other members default to zero/null.
    StructureType sType             = StructureType::ePhysicalDeviceRenderPassStripedFeaturesARM;
    void *        pNext             = {};
    Bool32        renderPassStriped = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper (only available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceRenderPassStripedFeaturesARM>
  {
    using Type = PhysicalDeviceRenderPassStripedFeaturesARM;
  };
#endif

  // Maps the StructureType enum value to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRenderPassStripedFeaturesARM>
  {
    using Type = PhysicalDeviceRenderPassStripedFeaturesARM;
  };

  // wrapper struct for struct VkPhysicalDeviceRenderPassStripedPropertiesARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRenderPassStripedPropertiesARM.html
  struct PhysicalDeviceRenderPassStripedPropertiesARM
  {
    using NativeType = VkPhysicalDeviceRenderPassStripedPropertiesARM;

    // structureType is the sType value identifying this struct; allowDuplicate is
    // consumed by structure-chain validation code elsewhere in vulkan.hpp (not shown here).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceRenderPassStripedPropertiesARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: all members default to zero/null; pNext comes last so the
    // property fields can be set positionally. (Properties structs are normally
    // filled in by the implementation, hence no setters are generated below.)
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRenderPassStripedPropertiesARM( Extent2D renderPassStripeGranularity_ = {},
                                                                       uint32_t maxRenderPassStripes_        = {},
                                                                       void *   pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , renderPassStripeGranularity{ renderPassStripeGranularity_ }
      , maxRenderPassStripes{ maxRenderPassStripes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRenderPassStripedPropertiesARM( PhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct: relies on this wrapper being layout-compatible
    // with VkPhysicalDeviceRenderPassStripedPropertiesARM.
    PhysicalDeviceRenderPassStripedPropertiesARM( VkPhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRenderPassStripedPropertiesARM( *reinterpret_cast<PhysicalDeviceRenderPassStripedPropertiesARM const *>( &rhs ) )
    {
    }

    PhysicalDeviceRenderPassStripedPropertiesARM & operator=( PhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, again via layout-compatible reinterpretation.
    PhysicalDeviceRenderPassStripedPropertiesARM & operator=( VkPhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceRenderPassStripedPropertiesARM const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C struct (reference and pointer forms) so the
    // wrapper can be passed directly to C API entry points.
    operator VkPhysicalDeviceRenderPassStripedPropertiesARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRenderPassStripedPropertiesARM *>( this );
    }

    operator VkPhysicalDeviceRenderPassStripedPropertiesARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRenderPassStripedPropertiesARM *>( this );
    }

    operator VkPhysicalDeviceRenderPassStripedPropertiesARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceRenderPassStripedPropertiesARM *>( this );
    }

    operator VkPhysicalDeviceRenderPassStripedPropertiesARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceRenderPassStripedPropertiesARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to every member, used by operator== below when
    // reflection-based comparison is enabled.
    std::tuple<StructureType const &, void * const &, Extent2D const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, renderPassStripeGranularity, maxRenderPassStripes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRenderPassStripedPropertiesARM const & ) const = default;
#else
    // Memberwise equality; note pNext is compared as a raw pointer, not deep-compared.
    bool operator==( PhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPassStripeGranularity == rhs.renderPassStripeGranularity ) &&
             ( maxRenderPassStripes == rhs.maxRenderPassStripes );
#  endif
    }

    bool operator!=( PhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType is fixed to this struct's StructureType; all other members default to zero/null.
    StructureType sType                       = StructureType::ePhysicalDeviceRenderPassStripedPropertiesARM;
    void *        pNext                       = {};
    Extent2D      renderPassStripeGranularity = {};
    uint32_t      maxRenderPassStripes        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper (only available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceRenderPassStripedPropertiesARM>
  {
    using Type = PhysicalDeviceRenderPassStripedPropertiesARM;
  };
#endif

  // Maps the StructureType enum value to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRenderPassStripedPropertiesARM>
  {
    using Type = PhysicalDeviceRenderPassStripedPropertiesARM;
  };

  // wrapper struct for struct VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV.html
  struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV
  {
    using NativeType = VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV;

    // whether this struct may appear more than once in the same pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; all members default to zero / nullptr, sType is fixed by the member initializer
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( Bool32 representativeFragmentTest_ = {},
                                                                             void * pNext_                      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , representativeFragmentTest{ representativeFragmentTest_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceRepresentativeFragmentTestFeaturesNV( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; relies on identical memory layout of wrapper and native struct
    PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRepresentativeFragmentTestFeaturesNV( *reinterpret_cast<PhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceRepresentativeFragmentTestFeaturesNV &
      operator=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (layout-compatible reinterpret_cast)
    PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters: each returns *this so calls can be fluently chained
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV &
      setRepresentativeFragmentTest( Bool32 representativeFragmentTest_ ) VULKAN_HPP_NOEXCEPT
    {
      representativeFragmentTest = representativeFragmentTest_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native C struct, for passing the wrapper directly to the Vulkan C API
    operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple-of-references view over all members, used below for reflection-based comparison
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, representativeFragmentTest );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & ) const = default;
#else
    // memberwise equality; pNext is compared as a raw pointer, not deep-compared
    bool operator==( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( representativeFragmentTest == rhs.representativeFragmentTest );
#  endif
    }

    bool operator!=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order mirrors the native C struct exactly; do not reorder
    StructureType sType                      = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;
    void *        pNext                      = {};
    Bool32        representativeFragmentTest = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C struct to its C++ wrapper type
  template <>
  struct CppType<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV>
  {
    using Type = PhysicalDeviceRepresentativeFragmentTestFeaturesNV;
  };
#endif

  // maps the StructureType enumerant to its C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV>
  {
    using Type = PhysicalDeviceRepresentativeFragmentTestFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceRobustness2FeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRobustness2FeaturesKHR.html
  struct PhysicalDeviceRobustness2FeaturesKHR
  {
    using NativeType = VkPhysicalDeviceRobustness2FeaturesKHR;

    // whether this struct may appear more than once in the same pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceRobustness2FeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; all members default to zero / nullptr, sType is fixed by the member initializer
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesKHR( Bool32 robustBufferAccess2_ = {},
                                                               Bool32 robustImageAccess2_  = {},
                                                               Bool32 nullDescriptor_      = {},
                                                               void * pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , robustBufferAccess2{ robustBufferAccess2_ }
      , robustImageAccess2{ robustImageAccess2_ }
      , nullDescriptor{ nullDescriptor_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesKHR( PhysicalDeviceRobustness2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; relies on identical memory layout of wrapper and native struct
    PhysicalDeviceRobustness2FeaturesKHR( VkPhysicalDeviceRobustness2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRobustness2FeaturesKHR( *reinterpret_cast<PhysicalDeviceRobustness2FeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceRobustness2FeaturesKHR & operator=( PhysicalDeviceRobustness2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (layout-compatible reinterpret_cast)
    PhysicalDeviceRobustness2FeaturesKHR & operator=( VkPhysicalDeviceRobustness2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceRobustness2FeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters: each returns *this so calls can be fluently chained
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesKHR & setRobustBufferAccess2( Bool32 robustBufferAccess2_ ) VULKAN_HPP_NOEXCEPT
    {
      robustBufferAccess2 = robustBufferAccess2_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesKHR & setRobustImageAccess2( Bool32 robustImageAccess2_ ) VULKAN_HPP_NOEXCEPT
    {
      robustImageAccess2 = robustImageAccess2_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesKHR & setNullDescriptor( Bool32 nullDescriptor_ ) VULKAN_HPP_NOEXCEPT
    {
      nullDescriptor = nullDescriptor_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native C struct, for passing the wrapper directly to the Vulkan C API
    operator VkPhysicalDeviceRobustness2FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRobustness2FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceRobustness2FeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRobustness2FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceRobustness2FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceRobustness2FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceRobustness2FeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceRobustness2FeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple-of-references view over all members, used below for reflection-based comparison
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, robustBufferAccess2, robustImageAccess2, nullDescriptor );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRobustness2FeaturesKHR const & ) const = default;
#else
    // memberwise equality; pNext is compared as a raw pointer, not deep-compared
    bool operator==( PhysicalDeviceRobustness2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustBufferAccess2 == rhs.robustBufferAccess2 ) &&
             ( robustImageAccess2 == rhs.robustImageAccess2 ) && ( nullDescriptor == rhs.nullDescriptor );
#  endif
    }

    bool operator!=( PhysicalDeviceRobustness2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order mirrors the native C struct exactly; do not reorder
    StructureType sType               = StructureType::ePhysicalDeviceRobustness2FeaturesKHR;
    void *        pNext               = {};
    Bool32        robustBufferAccess2 = {};
    Bool32        robustImageAccess2  = {};
    Bool32        nullDescriptor      = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C struct to its C++ wrapper type
  template <>
  struct CppType<VkPhysicalDeviceRobustness2FeaturesKHR>
  {
    using Type = PhysicalDeviceRobustness2FeaturesKHR;
  };
#endif

  // maps the StructureType enumerant to its C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRobustness2FeaturesKHR>
  {
    using Type = PhysicalDeviceRobustness2FeaturesKHR;
  };

  // backwards-compatible alias for the EXT-suffixed name
  using PhysicalDeviceRobustness2FeaturesEXT = PhysicalDeviceRobustness2FeaturesKHR;

  // wrapper struct for struct VkPhysicalDeviceRobustness2PropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRobustness2PropertiesKHR.html
  struct PhysicalDeviceRobustness2PropertiesKHR
  {
    using NativeType = VkPhysicalDeviceRobustness2PropertiesKHR;

    // whether this struct may appear more than once in the same pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceRobustness2PropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; all members default to zero / nullptr, sType is fixed by the member initializer
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesKHR( DeviceSize robustStorageBufferAccessSizeAlignment_ = {},
                                                                 DeviceSize robustUniformBufferAccessSizeAlignment_ = {},
                                                                 void *     pNext_                                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , robustStorageBufferAccessSizeAlignment{ robustStorageBufferAccessSizeAlignment_ }
      , robustUniformBufferAccessSizeAlignment{ robustUniformBufferAccessSizeAlignment_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesKHR( PhysicalDeviceRobustness2PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; relies on identical memory layout of wrapper and native struct
    PhysicalDeviceRobustness2PropertiesKHR( VkPhysicalDeviceRobustness2PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRobustness2PropertiesKHR( *reinterpret_cast<PhysicalDeviceRobustness2PropertiesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceRobustness2PropertiesKHR & operator=( PhysicalDeviceRobustness2PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (layout-compatible reinterpret_cast)
    // NOTE: no setters are generated — this is a read-only properties struct filled in by the implementation
    PhysicalDeviceRobustness2PropertiesKHR & operator=( VkPhysicalDeviceRobustness2PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceRobustness2PropertiesKHR const *>( &rhs );
      return *this;
    }

    // implicit conversions to the native C struct, for passing the wrapper directly to the Vulkan C API
    operator VkPhysicalDeviceRobustness2PropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRobustness2PropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceRobustness2PropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRobustness2PropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceRobustness2PropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceRobustness2PropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceRobustness2PropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceRobustness2PropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple-of-references view over all members, used below for reflection-based comparison
    std::tuple<StructureType const &, void * const &, DeviceSize const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, robustStorageBufferAccessSizeAlignment, robustUniformBufferAccessSizeAlignment );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRobustness2PropertiesKHR const & ) const = default;
#else
    // memberwise equality; pNext is compared as a raw pointer, not deep-compared
    bool operator==( PhysicalDeviceRobustness2PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustStorageBufferAccessSizeAlignment == rhs.robustStorageBufferAccessSizeAlignment ) &&
             ( robustUniformBufferAccessSizeAlignment == rhs.robustUniformBufferAccessSizeAlignment );
#  endif
    }

    bool operator!=( PhysicalDeviceRobustness2PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order mirrors the native C struct exactly; do not reorder
    StructureType sType                                  = StructureType::ePhysicalDeviceRobustness2PropertiesKHR;
    void *        pNext                                  = {};
    DeviceSize    robustStorageBufferAccessSizeAlignment = {};
    DeviceSize    robustUniformBufferAccessSizeAlignment = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C struct to its C++ wrapper type
  template <>
  struct CppType<VkPhysicalDeviceRobustness2PropertiesKHR>
  {
    using Type = PhysicalDeviceRobustness2PropertiesKHR;
  };
#endif

  // maps the StructureType enumerant to its C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRobustness2PropertiesKHR>
  {
    using Type = PhysicalDeviceRobustness2PropertiesKHR;
  };

  // backwards-compatible alias for the EXT-suffixed name
  using PhysicalDeviceRobustness2PropertiesEXT = PhysicalDeviceRobustness2PropertiesKHR;

  // wrapper struct for struct VkPhysicalDeviceSampleLocationsPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSampleLocationsPropertiesEXT.html
  struct PhysicalDeviceSampleLocationsPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceSampleLocationsPropertiesEXT;

    // whether this struct may appear more than once in the same pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; CONSTEXPR_14 because initializing the ArrayWrapper1D member from a
    // std::array needs relaxed constexpr rules
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( SampleCountFlags             sampleLocationSampleCounts_    = {},
                                                                        Extent2D                     maxSampleLocationGridSize_     = {},
                                                                        std::array<float, 2> const & sampleLocationCoordinateRange_ = {},
                                                                        uint32_t                     sampleLocationSubPixelBits_    = {},
                                                                        Bool32                       variableSampleLocations_       = {},
                                                                        void *                       pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , sampleLocationSampleCounts{ sampleLocationSampleCounts_ }
      , maxSampleLocationGridSize{ maxSampleLocationGridSize_ }
      , sampleLocationCoordinateRange{ sampleLocationCoordinateRange_ }
      , sampleLocationSubPixelBits{ sampleLocationSubPixelBits_ }
      , variableSampleLocations{ variableSampleLocations_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; relies on identical memory layout of wrapper and native struct
    PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSampleLocationsPropertiesEXT( *reinterpret_cast<PhysicalDeviceSampleLocationsPropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceSampleLocationsPropertiesEXT & operator=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (layout-compatible reinterpret_cast)
    // NOTE: no setters are generated — this is a read-only properties struct filled in by the implementation
    PhysicalDeviceSampleLocationsPropertiesEXT & operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceSampleLocationsPropertiesEXT const *>( &rhs );
      return *this;
    }

    // implicit conversions to the native C struct, for passing the wrapper directly to the Vulkan C API
    operator VkPhysicalDeviceSampleLocationsPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSampleLocationsPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceSampleLocationsPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSampleLocationsPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceSampleLocationsPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceSampleLocationsPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceSampleLocationsPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceSampleLocationsPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple-of-references view over all members, used below for reflection-based comparison
    std::tuple<StructureType const &,
               void * const &,
               SampleCountFlags const &,
               Extent2D const &,
               ArrayWrapper1D<float, 2> const &,
               uint32_t const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       sampleLocationSampleCounts,
                       maxSampleLocationGridSize,
                       sampleLocationCoordinateRange,
                       sampleLocationSubPixelBits,
                       variableSampleLocations );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSampleLocationsPropertiesEXT const & ) const = default;
#else
    // memberwise equality; pNext is compared as a raw pointer, not deep-compared
    bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts ) &&
             ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ) && ( sampleLocationCoordinateRange == rhs.sampleLocationCoordinateRange ) &&
             ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits ) && ( variableSampleLocations == rhs.variableSampleLocations );
#  endif
    }

    bool operator!=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order mirrors the native C struct exactly; do not reorder
    StructureType            sType                         = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
    void *                   pNext                         = {};
    SampleCountFlags         sampleLocationSampleCounts    = {};
    Extent2D                 maxSampleLocationGridSize     = {};
    ArrayWrapper1D<float, 2> sampleLocationCoordinateRange = {};
    uint32_t                 sampleLocationSubPixelBits    = {};
    Bool32                   variableSampleLocations       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C struct to its C++ wrapper type
  template <>
  struct CppType<VkPhysicalDeviceSampleLocationsPropertiesEXT>
  {
    using Type = PhysicalDeviceSampleLocationsPropertiesEXT;
  };
#endif

  // maps the StructureType enumerant to its C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT>
  {
    using Type = PhysicalDeviceSampleLocationsPropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceSamplerFilterMinmaxProperties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSamplerFilterMinmaxProperties.html
  struct PhysicalDeviceSamplerFilterMinmaxProperties
  {
    using NativeType = VkPhysicalDeviceSamplerFilterMinmaxProperties;

    // whether this struct may appear more than once in the same pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; all members default to zero / nullptr, sType is fixed by the member initializer
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( Bool32 filterMinmaxSingleComponentFormats_ = {},
                                                                      Bool32 filterMinmaxImageComponentMapping_  = {},
                                                                      void * pNext_                              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , filterMinmaxSingleComponentFormats{ filterMinmaxSingleComponentFormats_ }
      , filterMinmaxImageComponentMapping{ filterMinmaxImageComponentMapping_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; relies on identical memory layout of wrapper and native struct
    PhysicalDeviceSamplerFilterMinmaxProperties( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSamplerFilterMinmaxProperties( *reinterpret_cast<PhysicalDeviceSamplerFilterMinmaxProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceSamplerFilterMinmaxProperties & operator=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (layout-compatible reinterpret_cast)
    // NOTE: no setters are generated — this is a read-only properties struct filled in by the implementation
    PhysicalDeviceSamplerFilterMinmaxProperties & operator=( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceSamplerFilterMinmaxProperties const *>( &rhs );
      return *this;
    }

    // implicit conversions to the native C struct, for passing the wrapper directly to the Vulkan C API
    operator VkPhysicalDeviceSamplerFilterMinmaxProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSamplerFilterMinmaxProperties *>( this );
    }

    operator VkPhysicalDeviceSamplerFilterMinmaxProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxProperties *>( this );
    }

    operator VkPhysicalDeviceSamplerFilterMinmaxProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceSamplerFilterMinmaxProperties *>( this );
    }

    operator VkPhysicalDeviceSamplerFilterMinmaxProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple-of-references view over all members, used below for reflection-based comparison
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, filterMinmaxSingleComponentFormats, filterMinmaxImageComponentMapping );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSamplerFilterMinmaxProperties const & ) const = default;
#else
    // memberwise equality; pNext is compared as a raw pointer, not deep-compared
    bool operator==( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) &&
             ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping );
#  endif
    }

    bool operator!=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order mirrors the native C struct exactly; do not reorder
    StructureType sType                              = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
    void *        pNext                              = {};
    Bool32        filterMinmaxSingleComponentFormats = {};
    Bool32        filterMinmaxImageComponentMapping  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C struct to its C++ wrapper type
  template <>
  struct CppType<VkPhysicalDeviceSamplerFilterMinmaxProperties>
  {
    using Type = PhysicalDeviceSamplerFilterMinmaxProperties;
  };
#endif

  // maps the StructureType enumerant to its C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties>
  {
    using Type = PhysicalDeviceSamplerFilterMinmaxProperties;
  };

  // backwards-compatible alias for the EXT-suffixed name
  using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties;

  // wrapper struct for struct VkPhysicalDeviceSamplerYcbcrConversionFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSamplerYcbcrConversionFeatures.html
  struct PhysicalDeviceSamplerYcbcrConversionFeatures
  {
    using NativeType = VkPhysicalDeviceSamplerYcbcrConversionFeatures;

    // whether this struct may appear more than once in the same pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; all members default to zero / nullptr, sType is fixed by the member initializer
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( Bool32 samplerYcbcrConversion_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , samplerYcbcrConversion{ samplerYcbcrConversion_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; relies on identical memory layout of wrapper and native struct
    PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSamplerYcbcrConversionFeatures( *reinterpret_cast<PhysicalDeviceSamplerYcbcrConversionFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (layout-compatible reinterpret_cast)
    PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceSamplerYcbcrConversionFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters: each returns *this so calls can be fluently chained
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures & setSamplerYcbcrConversion( Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
    {
      samplerYcbcrConversion = samplerYcbcrConversion_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native C struct, for passing the wrapper directly to the Vulkan C API
    operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures *>( this );
    }

    operator VkPhysicalDeviceSamplerYcbcrConversionFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSamplerYcbcrConversionFeatures *>( this );
    }

    operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures *>( this );
    }

    operator VkPhysicalDeviceSamplerYcbcrConversionFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceSamplerYcbcrConversionFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple-of-references view over all members, used below for reflection-based comparison
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, samplerYcbcrConversion );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSamplerYcbcrConversionFeatures const & ) const = default;
#else
    // memberwise equality; pNext is compared as a raw pointer, not deep-compared
    bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion );
#  endif
    }

    bool operator!=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order mirrors the native C struct exactly; do not reorder
    StructureType sType                  = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
    void *        pNext                  = {};
    Bool32        samplerYcbcrConversion = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C struct to its C++ wrapper type
  template <>
  struct CppType<VkPhysicalDeviceSamplerYcbcrConversionFeatures>
  {
    using Type = PhysicalDeviceSamplerYcbcrConversionFeatures;
  };
#endif

  // maps the StructureType enumerant to its C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures>
  {
    using Type = PhysicalDeviceSamplerYcbcrConversionFeatures;
  };

  // backwards-compatible alias for the KHR-suffixed name
  using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures;

  // wrapper struct for struct VkPhysicalDeviceScalarBlockLayoutFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceScalarBlockLayoutFeatures.html
  struct PhysicalDeviceScalarBlockLayoutFeatures
  {
    using NativeType = VkPhysicalDeviceScalarBlockLayoutFeatures;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( Bool32 scalarBlockLayout_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , scalarBlockLayout{ scalarBlockLayout_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceScalarBlockLayoutFeatures( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceScalarBlockLayoutFeatures( *reinterpret_cast<PhysicalDeviceScalarBlockLayoutFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceScalarBlockLayoutFeatures & operator=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceScalarBlockLayoutFeatures & operator=( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceScalarBlockLayoutFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures & setScalarBlockLayout( Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      scalarBlockLayout = scalarBlockLayout_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPhysicalDeviceScalarBlockLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceScalarBlockLayoutFeatures *>( this );
    }

    operator VkPhysicalDeviceScalarBlockLayoutFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceScalarBlockLayoutFeatures *>( this );
    }

    operator VkPhysicalDeviceScalarBlockLayoutFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceScalarBlockLayoutFeatures *>( this );
    }

    operator VkPhysicalDeviceScalarBlockLayoutFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceScalarBlockLayoutFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, scalarBlockLayout );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison provides all relational operators member-wise.
    auto operator<=>( PhysicalDeviceScalarBlockLayoutFeatures const & ) const = default;
#else
    // Pre-C++20 fallback: member-wise equality. Note that pNext is compared as a raw
    // pointer value, not by following the chain.
    bool operator==( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( scalarBlockLayout == rhs.scalarBlockLayout );
#  endif
    }

    bool operator!=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct in declaration order; sType identifies this
    // structure when it is linked into a pNext chain.
    StructureType sType             = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures;
    void *        pNext             = {};
    Bool32        scalarBlockLayout = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (C++20 builds only).
  template <>
  struct CppType<VkPhysicalDeviceScalarBlockLayoutFeatures>
  {
    using Type = PhysicalDeviceScalarBlockLayoutFeatures;
  };
#endif

  // Trait mapping the StructureType enumerant to its C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceScalarBlockLayoutFeatures>
  {
    using Type = PhysicalDeviceScalarBlockLayoutFeatures;
  };

  // Alias retaining the extension-suffixed (EXT) name of this structure.
  using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures;

  // wrapper struct for struct VkPhysicalDeviceSchedulingControlsFeaturesARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSchedulingControlsFeaturesARM.html
  struct PhysicalDeviceSchedulingControlsFeaturesARM
  {
    // Layout-compatible C++ wrapper around the native C struct.
    using NativeType = VkPhysicalDeviceSchedulingControlsFeaturesARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceSchedulingControlsFeaturesARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer, only pNext and the
    // feature flag are settable here.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSchedulingControlsFeaturesARM( Bool32 schedulingControls_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , schedulingControls{ schedulingControls_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSchedulingControlsFeaturesARM( PhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-identical.
    PhysicalDeviceSchedulingControlsFeaturesARM( VkPhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSchedulingControlsFeaturesARM( *reinterpret_cast<PhysicalDeviceSchedulingControlsFeaturesARM const *>( &rhs ) )
    {
    }

    PhysicalDeviceSchedulingControlsFeaturesARM & operator=( PhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper.
    PhysicalDeviceSchedulingControlsFeaturesARM & operator=( VkPhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceSchedulingControlsFeaturesARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns a single member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSchedulingControlsFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSchedulingControlsFeaturesARM & setSchedulingControls( Bool32 schedulingControls_ ) VULKAN_HPP_NOEXCEPT
    {
      schedulingControls = schedulingControls_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct for direct use with the C Vulkan API.
    operator VkPhysicalDeviceSchedulingControlsFeaturesARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSchedulingControlsFeaturesARM *>( this );
    }

    operator VkPhysicalDeviceSchedulingControlsFeaturesARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSchedulingControlsFeaturesARM *>( this );
    }

    operator VkPhysicalDeviceSchedulingControlsFeaturesARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceSchedulingControlsFeaturesARM *>( this );
    }

    operator VkPhysicalDeviceSchedulingControlsFeaturesARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceSchedulingControlsFeaturesARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, schedulingControls );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison provides all relational operators member-wise.
    auto operator<=>( PhysicalDeviceSchedulingControlsFeaturesARM const & ) const = default;
#else
    // Pre-C++20 fallback: member-wise equality (pNext compared as a raw pointer).
    bool operator==( PhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( schedulingControls == rhs.schedulingControls );
#  endif
    }

    bool operator!=( PhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct in declaration order; sType identifies this
    // structure when it is linked into a pNext chain.
    StructureType sType              = StructureType::ePhysicalDeviceSchedulingControlsFeaturesARM;
    void *        pNext              = {};
    Bool32        schedulingControls = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (C++20 builds only).
  template <>
  struct CppType<VkPhysicalDeviceSchedulingControlsFeaturesARM>
  {
    using Type = PhysicalDeviceSchedulingControlsFeaturesARM;
  };
#endif

  // Trait mapping the StructureType enumerant to its C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSchedulingControlsFeaturesARM>
  {
    using Type = PhysicalDeviceSchedulingControlsFeaturesARM;
  };

  // wrapper struct for struct VkPhysicalDeviceSchedulingControlsPropertiesARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSchedulingControlsPropertiesARM.html
  struct PhysicalDeviceSchedulingControlsPropertiesARM
  {
    // Layout-compatible C++ wrapper around the native C struct.
    using NativeType = VkPhysicalDeviceSchedulingControlsPropertiesARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceSchedulingControlsPropertiesARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSchedulingControlsPropertiesARM( PhysicalDeviceSchedulingControlsFlagsARM schedulingControlsFlags_ = {},
                                                                        void *                                   pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , schedulingControlsFlags{ schedulingControlsFlags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceSchedulingControlsPropertiesARM( PhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-identical.
    PhysicalDeviceSchedulingControlsPropertiesARM( VkPhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSchedulingControlsPropertiesARM( *reinterpret_cast<PhysicalDeviceSchedulingControlsPropertiesARM const *>( &rhs ) )
    {
    }

    PhysicalDeviceSchedulingControlsPropertiesARM & operator=( PhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper.
    // NOTE: no member setters are generated for this struct (it is a properties
    // struct, filled in by the implementation).
    PhysicalDeviceSchedulingControlsPropertiesARM & operator=( VkPhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceSchedulingControlsPropertiesARM const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C struct for direct use with the C Vulkan API.
    operator VkPhysicalDeviceSchedulingControlsPropertiesARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSchedulingControlsPropertiesARM *>( this );
    }

    operator VkPhysicalDeviceSchedulingControlsPropertiesARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSchedulingControlsPropertiesARM *>( this );
    }

    operator VkPhysicalDeviceSchedulingControlsPropertiesARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceSchedulingControlsPropertiesARM *>( this );
    }

    operator VkPhysicalDeviceSchedulingControlsPropertiesARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceSchedulingControlsPropertiesARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, void * const &, PhysicalDeviceSchedulingControlsFlagsARM const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, schedulingControlsFlags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison provides all relational operators member-wise.
    auto operator<=>( PhysicalDeviceSchedulingControlsPropertiesARM const & ) const = default;
#else
    // Pre-C++20 fallback: member-wise equality (pNext compared as a raw pointer).
    bool operator==( PhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( schedulingControlsFlags == rhs.schedulingControlsFlags );
#  endif
    }

    bool operator!=( PhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct in declaration order; sType identifies this
    // structure when it is linked into a pNext chain.
    StructureType                            sType                   = StructureType::ePhysicalDeviceSchedulingControlsPropertiesARM;
    void *                                   pNext                   = {};
    PhysicalDeviceSchedulingControlsFlagsARM schedulingControlsFlags = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (C++20 builds only).
  template <>
  struct CppType<VkPhysicalDeviceSchedulingControlsPropertiesARM>
  {
    using Type = PhysicalDeviceSchedulingControlsPropertiesARM;
  };
#endif

  // Trait mapping the StructureType enumerant to its C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSchedulingControlsPropertiesARM>
  {
    using Type = PhysicalDeviceSchedulingControlsPropertiesARM;
  };

  // wrapper struct for struct VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures.html
  struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures
  {
    // Layout-compatible C++ wrapper around the native C struct.
    using NativeType = VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer, only pNext and the
    // feature flag are settable here.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( Bool32 separateDepthStencilLayouts_ = {},
                                                                            void * pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , separateDepthStencilLayouts{ separateDepthStencilLayouts_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceSeparateDepthStencilLayoutsFeatures( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-identical.
    PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSeparateDepthStencilLayoutsFeatures( *reinterpret_cast<PhysicalDeviceSeparateDepthStencilLayoutsFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceSeparateDepthStencilLayoutsFeatures &
      operator=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper.
    PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceSeparateDepthStencilLayoutsFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns a single member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures &
      setSeparateDepthStencilLayouts( Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      separateDepthStencilLayouts = separateDepthStencilLayouts_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct for direct use with the C Vulkan API.
    operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures *>( this );
    }

    operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures *>( this );
    }

    operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures *>( this );
    }

    operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, separateDepthStencilLayouts );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison provides all relational operators member-wise.
    auto operator<=>( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & ) const = default;
#else
    // Pre-C++20 fallback: member-wise equality (pNext compared as a raw pointer).
    bool operator==( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts );
#  endif
    }

    bool operator!=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct in declaration order; sType identifies this
    // structure when it is linked into a pNext chain.
    StructureType sType                       = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;
    void *        pNext                       = {};
    Bool32        separateDepthStencilLayouts = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (C++20 builds only).
  template <>
  struct CppType<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures>
  {
    using Type = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
  };
#endif

  // Trait mapping the StructureType enumerant to its C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures>
  {
    using Type = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
  };

  // Alias retaining the extension-suffixed (KHR) name of this structure.
  using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;

  // wrapper struct for struct VkPhysicalDeviceShader64BitIndexingFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShader64BitIndexingFeaturesEXT.html
  struct PhysicalDeviceShader64BitIndexingFeaturesEXT
  {
    // Layout-compatible C++ wrapper around the native C struct.
    using NativeType = VkPhysicalDeviceShader64BitIndexingFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShader64BitIndexingFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer, only pNext and the
    // feature flag are settable here.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShader64BitIndexingFeaturesEXT( Bool32 shader64BitIndexing_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shader64BitIndexing{ shader64BitIndexing_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShader64BitIndexingFeaturesEXT( PhysicalDeviceShader64BitIndexingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-identical.
    PhysicalDeviceShader64BitIndexingFeaturesEXT( VkPhysicalDeviceShader64BitIndexingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShader64BitIndexingFeaturesEXT( *reinterpret_cast<PhysicalDeviceShader64BitIndexingFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceShader64BitIndexingFeaturesEXT & operator=( PhysicalDeviceShader64BitIndexingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper.
    PhysicalDeviceShader64BitIndexingFeaturesEXT & operator=( VkPhysicalDeviceShader64BitIndexingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShader64BitIndexingFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns a single member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShader64BitIndexingFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShader64BitIndexingFeaturesEXT & setShader64BitIndexing( Bool32 shader64BitIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shader64BitIndexing = shader64BitIndexing_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct for direct use with the C Vulkan API.
    operator VkPhysicalDeviceShader64BitIndexingFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShader64BitIndexingFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShader64BitIndexingFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShader64BitIndexingFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShader64BitIndexingFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShader64BitIndexingFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShader64BitIndexingFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShader64BitIndexingFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shader64BitIndexing );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison provides all relational operators member-wise.
    auto operator<=>( PhysicalDeviceShader64BitIndexingFeaturesEXT const & ) const = default;
#else
    // Pre-C++20 fallback: member-wise equality (pNext compared as a raw pointer).
    bool operator==( PhysicalDeviceShader64BitIndexingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shader64BitIndexing == rhs.shader64BitIndexing );
#  endif
    }

    bool operator!=( PhysicalDeviceShader64BitIndexingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct in declaration order; sType identifies this
    // structure when it is linked into a pNext chain.
    StructureType sType               = StructureType::ePhysicalDeviceShader64BitIndexingFeaturesEXT;
    void *        pNext               = {};
    Bool32        shader64BitIndexing = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (C++20 builds only).
  template <>
  struct CppType<VkPhysicalDeviceShader64BitIndexingFeaturesEXT>
  {
    using Type = PhysicalDeviceShader64BitIndexingFeaturesEXT;
  };
#endif

  // Trait mapping the StructureType enumerant to its C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShader64BitIndexingFeaturesEXT>
  {
    using Type = PhysicalDeviceShader64BitIndexingFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV.html
  struct PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV
  {
    // Layout-compatible C++ wrapper around the native C struct.
    using NativeType = VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer, only pNext and the
    // feature flag are settable here.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV( Bool32 shaderFloat16VectorAtomics_ = {},
                                                                            void * pNext_                      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderFloat16VectorAtomics{ shaderFloat16VectorAtomics_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV( PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-identical.
    PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV( VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV( *reinterpret_cast<PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV &
      operator=( PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper.
    PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV & operator=( VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns a single member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV &
      setShaderFloat16VectorAtomics( Bool32 shaderFloat16VectorAtomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderFloat16VectorAtomics = shaderFloat16VectorAtomics_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct for direct use with the C Vulkan API.
    operator VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderFloat16VectorAtomics );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison provides all relational operators member-wise.
    auto operator<=>( PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & ) const = default;
#else
    // Pre-C++20 fallback: member-wise equality (pNext compared as a raw pointer).
    bool operator==( PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderFloat16VectorAtomics == rhs.shaderFloat16VectorAtomics );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct in declaration order; sType identifies this
    // structure when it is linked into a pNext chain.
    StructureType sType                      = StructureType::ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV;
    void *        pNext                      = {};
    Bool32        shaderFloat16VectorAtomics = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (C++20 builds only).
  template <>
  struct CppType<VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV>
  {
    using Type = PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV;
  };
#endif

  // Trait mapping the StructureType enumerant to its C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV>
  {
    using Type = PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT.html
  struct PhysicalDeviceShaderAtomicFloat2FeaturesEXT
  {
    using NativeType = VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat2FeaturesEXT( Bool32 shaderBufferFloat16Atomics_      = {},
                                                                      Bool32 shaderBufferFloat16AtomicAdd_    = {},
                                                                      Bool32 shaderBufferFloat16AtomicMinMax_ = {},
                                                                      Bool32 shaderBufferFloat32AtomicMinMax_ = {},
                                                                      Bool32 shaderBufferFloat64AtomicMinMax_ = {},
                                                                      Bool32 shaderSharedFloat16Atomics_      = {},
                                                                      Bool32 shaderSharedFloat16AtomicAdd_    = {},
                                                                      Bool32 shaderSharedFloat16AtomicMinMax_ = {},
                                                                      Bool32 shaderSharedFloat32AtomicMinMax_ = {},
                                                                      Bool32 shaderSharedFloat64AtomicMinMax_ = {},
                                                                      Bool32 shaderImageFloat32AtomicMinMax_  = {},
                                                                      Bool32 sparseImageFloat32AtomicMinMax_  = {},
                                                                      void * pNext_                           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderBufferFloat16Atomics{ shaderBufferFloat16Atomics_ }
      , shaderBufferFloat16AtomicAdd{ shaderBufferFloat16AtomicAdd_ }
      , shaderBufferFloat16AtomicMinMax{ shaderBufferFloat16AtomicMinMax_ }
      , shaderBufferFloat32AtomicMinMax{ shaderBufferFloat32AtomicMinMax_ }
      , shaderBufferFloat64AtomicMinMax{ shaderBufferFloat64AtomicMinMax_ }
      , shaderSharedFloat16Atomics{ shaderSharedFloat16Atomics_ }
      , shaderSharedFloat16AtomicAdd{ shaderSharedFloat16AtomicAdd_ }
      , shaderSharedFloat16AtomicMinMax{ shaderSharedFloat16AtomicMinMax_ }
      , shaderSharedFloat32AtomicMinMax{ shaderSharedFloat32AtomicMinMax_ }
      , shaderSharedFloat64AtomicMinMax{ shaderSharedFloat64AtomicMinMax_ }
      , shaderImageFloat32AtomicMinMax{ shaderImageFloat32AtomicMinMax_ }
      , sparseImageFloat32AtomicMinMax{ sparseImageFloat32AtomicMinMax_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat2FeaturesEXT( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderAtomicFloat2FeaturesEXT( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderAtomicFloat2FeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderAtomicFloat2FeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderAtomicFloat2FeaturesEXT & operator=( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceShaderAtomicFloat2FeaturesEXT & operator=( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderAtomicFloat2FeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
      setShaderBufferFloat16Atomics( Bool32 shaderBufferFloat16Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBufferFloat16Atomics = shaderBufferFloat16Atomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
      setShaderBufferFloat16AtomicAdd( Bool32 shaderBufferFloat16AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBufferFloat16AtomicAdd = shaderBufferFloat16AtomicAdd_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
      setShaderBufferFloat16AtomicMinMax( Bool32 shaderBufferFloat16AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBufferFloat16AtomicMinMax = shaderBufferFloat16AtomicMinMax_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
      setShaderBufferFloat32AtomicMinMax( Bool32 shaderBufferFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBufferFloat32AtomicMinMax = shaderBufferFloat32AtomicMinMax_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
      setShaderBufferFloat64AtomicMinMax( Bool32 shaderBufferFloat64AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBufferFloat64AtomicMinMax = shaderBufferFloat64AtomicMinMax_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
      setShaderSharedFloat16Atomics( Bool32 shaderSharedFloat16Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSharedFloat16Atomics = shaderSharedFloat16Atomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
      setShaderSharedFloat16AtomicAdd( Bool32 shaderSharedFloat16AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSharedFloat16AtomicAdd = shaderSharedFloat16AtomicAdd_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
      setShaderSharedFloat16AtomicMinMax( Bool32 shaderSharedFloat16AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSharedFloat16AtomicMinMax = shaderSharedFloat16AtomicMinMax_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
      setShaderSharedFloat32AtomicMinMax( Bool32 shaderSharedFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSharedFloat32AtomicMinMax = shaderSharedFloat32AtomicMinMax_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
      setShaderSharedFloat64AtomicMinMax( Bool32 shaderSharedFloat64AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSharedFloat64AtomicMinMax = shaderSharedFloat64AtomicMinMax_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
      setShaderImageFloat32AtomicMinMax( Bool32 shaderImageFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderImageFloat32AtomicMinMax = shaderImageFloat32AtomicMinMax_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
      setSparseImageFloat32AtomicMinMax( Bool32 sparseImageFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseImageFloat32AtomicMinMax = sparseImageFloat32AtomicMinMax_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversion to a const reference of the native C struct (layout-compatible view).
    operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      auto const * native = reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT *>( this );
      return *native;
    }

    // Implicit conversion to a mutable reference of the native C struct (layout-compatible view).
    operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      auto * native = reinterpret_cast<VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT *>( this );
      return *native;
    }

    // Implicit conversion to a const pointer to the native C struct (layout-compatible view).
    operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      auto const * native = reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT *>( this );
      return native;
    }

    // Implicit conversion to a mutable pointer to the native C struct (layout-compatible view).
    operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      auto * native = reinterpret_cast<VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT *>( this );
      return native;
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes sType, pNext, and the twelve feature flags as a tuple of const references,
    // enabling generic tuple-based comparison (see operator== below) and iteration.
    std::tuple<StructureType const &,
               void * const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       shaderBufferFloat16Atomics,
                       shaderBufferFloat16AtomicAdd,
                       shaderBufferFloat16AtomicMinMax,
                       shaderBufferFloat32AtomicMinMax,
                       shaderBufferFloat64AtomicMinMax,
                       shaderSharedFloat16Atomics,
                       shaderSharedFloat16AtomicAdd,
                       shaderSharedFloat16AtomicMinMax,
                       shaderSharedFloat32AtomicMinMax,
                       shaderSharedFloat64AtomicMinMax,
                       shaderImageFloat32AtomicMinMax,
                       sparseImageFloat32AtomicMinMax );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & ) const = default;
#else
    // Equality compares every member, including sType and the pNext pointer value
    // (pointer identity, not a deep comparison of the pNext chain).
    bool operator==( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      // Tuple-wise comparison over all members via reflect().
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBufferFloat16Atomics == rhs.shaderBufferFloat16Atomics ) &&
             ( shaderBufferFloat16AtomicAdd == rhs.shaderBufferFloat16AtomicAdd ) &&
             ( shaderBufferFloat16AtomicMinMax == rhs.shaderBufferFloat16AtomicMinMax ) &&
             ( shaderBufferFloat32AtomicMinMax == rhs.shaderBufferFloat32AtomicMinMax ) &&
             ( shaderBufferFloat64AtomicMinMax == rhs.shaderBufferFloat64AtomicMinMax ) && ( shaderSharedFloat16Atomics == rhs.shaderSharedFloat16Atomics ) &&
             ( shaderSharedFloat16AtomicAdd == rhs.shaderSharedFloat16AtomicAdd ) &&
             ( shaderSharedFloat16AtomicMinMax == rhs.shaderSharedFloat16AtomicMinMax ) &&
             ( shaderSharedFloat32AtomicMinMax == rhs.shaderSharedFloat32AtomicMinMax ) &&
             ( shaderSharedFloat64AtomicMinMax == rhs.shaderSharedFloat64AtomicMinMax ) &&
             ( shaderImageFloat32AtomicMinMax == rhs.shaderImageFloat32AtomicMinMax ) &&
             ( sparseImageFloat32AtomicMinMax == rhs.sparseImageFloat32AtomicMinMax );
#  endif
    }

    // Inequality is defined as the logical negation of operator==.
    bool operator!=( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType                           = StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT;
    void *        pNext                           = {};
    Bool32        shaderBufferFloat16Atomics      = {};
    Bool32        shaderBufferFloat16AtomicAdd    = {};
    Bool32        shaderBufferFloat16AtomicMinMax = {};
    Bool32        shaderBufferFloat32AtomicMinMax = {};
    Bool32        shaderBufferFloat64AtomicMinMax = {};
    Bool32        shaderSharedFloat16Atomics      = {};
    Bool32        shaderSharedFloat16AtomicAdd    = {};
    Bool32        shaderSharedFloat16AtomicMinMax = {};
    Bool32        shaderSharedFloat32AtomicMinMax = {};
    Bool32        shaderSharedFloat64AtomicMinMax = {};
    Bool32        shaderImageFloat32AtomicMinMax  = {};
    Bool32        sparseImageFloat32AtomicMinMax  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT>
  {
    using Type = PhysicalDeviceShaderAtomicFloat2FeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct, for sType-based lookups.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT>
  {
    using Type = PhysicalDeviceShaderAtomicFloat2FeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderAtomicFloatFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderAtomicFloatFeaturesEXT.html
  struct PhysicalDeviceShaderAtomicFloatFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceShaderAtomicFloatFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: every feature flag defaults to zero-initialized Bool32, pNext to nullptr.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( Bool32 shaderBufferFloat32Atomics_   = {},
                                                                     Bool32 shaderBufferFloat32AtomicAdd_ = {},
                                                                     Bool32 shaderBufferFloat64Atomics_   = {},
                                                                     Bool32 shaderBufferFloat64AtomicAdd_ = {},
                                                                     Bool32 shaderSharedFloat32Atomics_   = {},
                                                                     Bool32 shaderSharedFloat32AtomicAdd_ = {},
                                                                     Bool32 shaderSharedFloat64Atomics_   = {},
                                                                     Bool32 shaderSharedFloat64AtomicAdd_ = {},
                                                                     Bool32 shaderImageFloat32Atomics_    = {},
                                                                     Bool32 shaderImageFloat32AtomicAdd_  = {},
                                                                     Bool32 sparseImageFloat32Atomics_    = {},
                                                                     Bool32 sparseImageFloat32AtomicAdd_  = {},
                                                                     void * pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderBufferFloat32Atomics{ shaderBufferFloat32Atomics_ }
      , shaderBufferFloat32AtomicAdd{ shaderBufferFloat32AtomicAdd_ }
      , shaderBufferFloat64Atomics{ shaderBufferFloat64Atomics_ }
      , shaderBufferFloat64AtomicAdd{ shaderBufferFloat64AtomicAdd_ }
      , shaderSharedFloat32Atomics{ shaderSharedFloat32Atomics_ }
      , shaderSharedFloat32AtomicAdd{ shaderSharedFloat32AtomicAdd_ }
      , shaderSharedFloat64Atomics{ shaderSharedFloat64Atomics_ }
      , shaderSharedFloat64AtomicAdd{ shaderSharedFloat64AtomicAdd_ }
      , shaderImageFloat32Atomics{ shaderImageFloat32Atomics_ }
      , shaderImageFloat32AtomicAdd{ shaderImageFloat32AtomicAdd_ }
      , sparseImageFloat32Atomics{ sparseImageFloat32Atomics_ }
      , sparseImageFloat32AtomicAdd{ sparseImageFloat32AtomicAdd_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on layout compatibility for the reinterpret_cast.
    PhysicalDeviceShaderAtomicFloatFeaturesEXT( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderAtomicFloatFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderAtomicFloatFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; relies on layout compatibility for the reinterpret_cast.
    PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderAtomicFloatFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat32Atomics( Bool32 shaderBufferFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBufferFloat32Atomics = shaderBufferFloat32Atomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setShaderBufferFloat32AtomicAdd( Bool32 shaderBufferFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBufferFloat32AtomicAdd = shaderBufferFloat32AtomicAdd_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat64Atomics( Bool32 shaderBufferFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBufferFloat64Atomics = shaderBufferFloat64Atomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setShaderBufferFloat64AtomicAdd( Bool32 shaderBufferFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBufferFloat64AtomicAdd = shaderBufferFloat64AtomicAdd_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat32Atomics( Bool32 shaderSharedFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSharedFloat32Atomics = shaderSharedFloat32Atomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setShaderSharedFloat32AtomicAdd( Bool32 shaderSharedFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSharedFloat32AtomicAdd = shaderSharedFloat32AtomicAdd_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat64Atomics( Bool32 shaderSharedFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSharedFloat64Atomics = shaderSharedFloat64Atomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setShaderSharedFloat64AtomicAdd( Bool32 shaderSharedFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSharedFloat64AtomicAdd = shaderSharedFloat64AtomicAdd_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderImageFloat32Atomics( Bool32 shaderImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderImageFloat32Atomics = shaderImageFloat32Atomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setShaderImageFloat32AtomicAdd( Bool32 shaderImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderImageFloat32AtomicAdd = shaderImageFloat32AtomicAdd_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setSparseImageFloat32Atomics( Bool32 sparseImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseImageFloat32Atomics = sparseImageFloat32Atomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setSparseImageFloat32AtomicAdd( Bool32 sparseImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseImageFloat32AtomicAdd = sparseImageFloat32AtomicAdd_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloatFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderAtomicFloatFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloatFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderAtomicFloatFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references for generic comparison.
    std::tuple<StructureType const &,
               void * const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       shaderBufferFloat32Atomics,
                       shaderBufferFloat32AtomicAdd,
                       shaderBufferFloat64Atomics,
                       shaderBufferFloat64AtomicAdd,
                       shaderSharedFloat32Atomics,
                       shaderSharedFloat32AtomicAdd,
                       shaderSharedFloat64Atomics,
                       shaderSharedFloat64AtomicAdd,
                       shaderImageFloat32Atomics,
                       shaderImageFloat32AtomicAdd,
                       sparseImageFloat32Atomics,
                       sparseImageFloat32AtomicAdd );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & ) const = default;
#else
    // Equality compares every member, including sType and the pNext pointer value.
    bool operator==( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBufferFloat32Atomics == rhs.shaderBufferFloat32Atomics ) &&
             ( shaderBufferFloat32AtomicAdd == rhs.shaderBufferFloat32AtomicAdd ) && ( shaderBufferFloat64Atomics == rhs.shaderBufferFloat64Atomics ) &&
             ( shaderBufferFloat64AtomicAdd == rhs.shaderBufferFloat64AtomicAdd ) && ( shaderSharedFloat32Atomics == rhs.shaderSharedFloat32Atomics ) &&
             ( shaderSharedFloat32AtomicAdd == rhs.shaderSharedFloat32AtomicAdd ) && ( shaderSharedFloat64Atomics == rhs.shaderSharedFloat64Atomics ) &&
             ( shaderSharedFloat64AtomicAdd == rhs.shaderSharedFloat64AtomicAdd ) && ( shaderImageFloat32Atomics == rhs.shaderImageFloat32Atomics ) &&
             ( shaderImageFloat32AtomicAdd == rhs.shaderImageFloat32AtomicAdd ) && ( sparseImageFloat32Atomics == rhs.sparseImageFloat32Atomics ) &&
             ( sparseImageFloat32AtomicAdd == rhs.sparseImageFloat32AtomicAdd );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // NOTE(review): the reinterpret_cast conversions above depend on this member list
    // matching the native struct's layout exactly — do not reorder or insert members.
    StructureType sType                        = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT;
    void *        pNext                        = {};
    Bool32        shaderBufferFloat32Atomics   = {};
    Bool32        shaderBufferFloat32AtomicAdd = {};
    Bool32        shaderBufferFloat64Atomics   = {};
    Bool32        shaderBufferFloat64AtomicAdd = {};
    Bool32        shaderSharedFloat32Atomics   = {};
    Bool32        shaderSharedFloat32AtomicAdd = {};
    Bool32        shaderSharedFloat64Atomics   = {};
    Bool32        shaderSharedFloat64AtomicAdd = {};
    Bool32        shaderImageFloat32Atomics    = {};
    Bool32        shaderImageFloat32AtomicAdd  = {};
    Bool32        sparseImageFloat32Atomics    = {};
    Bool32        sparseImageFloat32AtomicAdd  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceShaderAtomicFloatFeaturesEXT>
  {
    using Type = PhysicalDeviceShaderAtomicFloatFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct, for sType-based lookups.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT>
  {
    using Type = PhysicalDeviceShaderAtomicFloatFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderAtomicInt64Features, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderAtomicInt64Features.html
  struct PhysicalDeviceShaderAtomicInt64Features
  {
    using NativeType = VkPhysicalDeviceShaderAtomicInt64Features;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderAtomicInt64Features;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: both feature flags default to zero-initialized Bool32, pNext to nullptr.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( Bool32 shaderBufferInt64Atomics_ = {},
                                                                  Bool32 shaderSharedInt64Atomics_ = {},
                                                                  void * pNext_                    = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderBufferInt64Atomics{ shaderBufferInt64Atomics_ }
      , shaderSharedInt64Atomics{ shaderSharedInt64Atomics_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on layout compatibility for the reinterpret_cast.
    PhysicalDeviceShaderAtomicInt64Features( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderAtomicInt64Features( *reinterpret_cast<PhysicalDeviceShaderAtomicInt64Features const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderAtomicInt64Features & operator=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; relies on layout compatibility for the reinterpret_cast.
    PhysicalDeviceShaderAtomicInt64Features & operator=( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderAtomicInt64Features const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & setShaderBufferInt64Atomics( Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & setShaderSharedInt64Atomics( Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkPhysicalDeviceShaderAtomicInt64Features const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicInt64Features *>( this );
    }

    operator VkPhysicalDeviceShaderAtomicInt64Features &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64Features *>( this );
    }

    operator VkPhysicalDeviceShaderAtomicInt64Features const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderAtomicInt64Features *>( this );
    }

    operator VkPhysicalDeviceShaderAtomicInt64Features *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64Features *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references for generic comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderBufferInt64Atomics, shaderSharedInt64Atomics );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderAtomicInt64Features const & ) const = default;
#else
    // Equality compares every member, including sType and the pNext pointer value.
    bool operator==( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) &&
             ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // NOTE(review): the reinterpret_cast conversions above depend on this member list
    // matching the native struct's layout exactly — do not reorder or insert members.
    StructureType sType                    = StructureType::ePhysicalDeviceShaderAtomicInt64Features;
    void *        pNext                    = {};
    Bool32        shaderBufferInt64Atomics = {};
    Bool32        shaderSharedInt64Atomics = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceShaderAtomicInt64Features>
  {
    using Type = PhysicalDeviceShaderAtomicInt64Features;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct, for sType-based lookups.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicInt64Features>
  {
    using Type = PhysicalDeviceShaderAtomicInt64Features;
  };

  // Alias kept for source compatibility with the original KHR extension name.
  using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features;

  // wrapper struct for struct VkPhysicalDeviceShaderBfloat16FeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderBfloat16FeaturesKHR.html
  struct PhysicalDeviceShaderBfloat16FeaturesKHR
  {
    using NativeType = VkPhysicalDeviceShaderBfloat16FeaturesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderBfloat16FeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: every feature flag defaults to zero-initialized Bool32, pNext to nullptr.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderBfloat16FeaturesKHR( Bool32 shaderBFloat16Type_              = {},
                                                                  Bool32 shaderBFloat16DotProduct_        = {},
                                                                  Bool32 shaderBFloat16CooperativeMatrix_ = {},
                                                                  void * pNext_                           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderBFloat16Type{ shaderBFloat16Type_ }
      , shaderBFloat16DotProduct{ shaderBFloat16DotProduct_ }
      , shaderBFloat16CooperativeMatrix{ shaderBFloat16CooperativeMatrix_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderBfloat16FeaturesKHR( PhysicalDeviceShaderBfloat16FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on layout compatibility for the reinterpret_cast.
    PhysicalDeviceShaderBfloat16FeaturesKHR( VkPhysicalDeviceShaderBfloat16FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderBfloat16FeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderBfloat16FeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderBfloat16FeaturesKHR & operator=( PhysicalDeviceShaderBfloat16FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; relies on layout compatibility for the reinterpret_cast.
    PhysicalDeviceShaderBfloat16FeaturesKHR & operator=( VkPhysicalDeviceShaderBfloat16FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderBfloat16FeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderBfloat16FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderBfloat16FeaturesKHR & setShaderBFloat16Type( Bool32 shaderBFloat16Type_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBFloat16Type = shaderBFloat16Type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderBfloat16FeaturesKHR & setShaderBFloat16DotProduct( Bool32 shaderBFloat16DotProduct_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBFloat16DotProduct = shaderBFloat16DotProduct_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderBfloat16FeaturesKHR &
      setShaderBFloat16CooperativeMatrix( Bool32 shaderBFloat16CooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBFloat16CooperativeMatrix = shaderBFloat16CooperativeMatrix_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkPhysicalDeviceShaderBfloat16FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderBfloat16FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderBfloat16FeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderBfloat16FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderBfloat16FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderBfloat16FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderBfloat16FeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderBfloat16FeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references for generic comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderBFloat16Type, shaderBFloat16DotProduct, shaderBFloat16CooperativeMatrix );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderBfloat16FeaturesKHR const & ) const = default;
#else
    // Equality compares every member, including sType and the pNext pointer value.
    bool operator==( PhysicalDeviceShaderBfloat16FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBFloat16Type == rhs.shaderBFloat16Type ) &&
             ( shaderBFloat16DotProduct == rhs.shaderBFloat16DotProduct ) && ( shaderBFloat16CooperativeMatrix == rhs.shaderBFloat16CooperativeMatrix );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderBfloat16FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // NOTE(review): the reinterpret_cast conversions above depend on this member list
    // matching the native struct's layout exactly — do not reorder or insert members.
    StructureType sType                           = StructureType::ePhysicalDeviceShaderBfloat16FeaturesKHR;
    void *        pNext                           = {};
    Bool32        shaderBFloat16Type              = {};
    Bool32        shaderBFloat16DotProduct        = {};
    Bool32        shaderBFloat16CooperativeMatrix = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceShaderBfloat16FeaturesKHR>
  {
    using Type = PhysicalDeviceShaderBfloat16FeaturesKHR;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct, for sType-based lookups.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderBfloat16FeaturesKHR>
  {
    using Type = PhysicalDeviceShaderBfloat16FeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderClockFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderClockFeaturesKHR.html
  struct PhysicalDeviceShaderClockFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceShaderClockFeaturesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderClockFeaturesKHR( Bool32 shaderSubgroupClock_ = {}, Bool32 shaderDeviceClock_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderSubgroupClock{ shaderSubgroupClock_ }
      , shaderDeviceClock{ shaderDeviceClock_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderClockFeaturesKHR( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderClockFeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderClockFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderClockFeaturesKHR & operator=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceShaderClockFeaturesKHR & operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderClockFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setShaderSubgroupClock( Bool32 shaderSubgroupClock_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSubgroupClock = shaderSubgroupClock_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setShaderDeviceClock( Bool32 shaderDeviceClock_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderDeviceClock = shaderDeviceClock_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPhysicalDeviceShaderClockFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderClockFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderClockFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderClockFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderClockFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderClockFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderClockFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderClockFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderSubgroupClock, shaderDeviceClock );
    }
#endif

    // Member-wise comparison.  Note that pNext is compared as a raw pointer (shallow):
    // two structs with equal values but different chains compare unequal.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderClockFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupClock == rhs.shaderSubgroupClock ) &&
             ( shaderDeviceClock == rhs.shaderDeviceClock );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPhysicalDeviceShaderClockFeaturesKHR one-to-one;
    // the reinterpret_cast conversions above depend on this exact layout.
    StructureType sType               = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
    void *        pNext               = {};
    Bool32        shaderSubgroupClock = {};
    Bool32        shaderDeviceClock   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C struct type to its C++ wrapper.
  template <>
  struct CppType<VkPhysicalDeviceShaderClockFeaturesKHR>
  {
    using Type = PhysicalDeviceShaderClockFeaturesKHR;
  };
#endif

  // Trait mapping the StructureType enum value to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderClockFeaturesKHR>
  {
    using Type = PhysicalDeviceShaderClockFeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM.html
  struct PhysicalDeviceShaderCoreBuiltinsFeaturesARM
  {
    using NativeType = VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM;

    // Structure-chain metadata: this struct may appear at most once in a pNext chain,
    // and structureType is the sType value it must carry.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer below.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreBuiltinsFeaturesARM( Bool32 shaderCoreBuiltins_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderCoreBuiltins{ shaderCoreBuiltins_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreBuiltinsFeaturesARM( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; wrapper and C struct are layout-compatible,
    // so a reinterpret_cast of the source is sufficient.
    PhysicalDeviceShaderCoreBuiltinsFeaturesARM( VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderCoreBuiltinsFeaturesARM( *reinterpret_cast<PhysicalDeviceShaderCoreBuiltinsFeaturesARM const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderCoreBuiltinsFeaturesARM & operator=( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility reasoning as the converting constructor).
    PhysicalDeviceShaderCoreBuiltinsFeaturesARM & operator=( VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderCoreBuiltinsFeaturesARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderCoreBuiltinsFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderCoreBuiltinsFeaturesARM & setShaderCoreBuiltins( Bool32 shaderCoreBuiltins_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderCoreBuiltins = shaderCoreBuiltins_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (reference and pointer, const and non-const).
    operator VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM *>( this );
    }

    operator VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM *>( this );
    }

    operator VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM *>( this );
    }

    operator VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references for generic tuple-wise operations.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderCoreBuiltins );
    }
#endif

    // Member-wise comparison; pNext is compared as a raw pointer (shallow).
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreBuiltins == rhs.shaderCoreBuiltins );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order/types mirror the C struct; required by the reinterpret_cast conversions above.
    StructureType sType              = StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM;
    void *        pNext              = {};
    Bool32        shaderCoreBuiltins = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C struct type to its C++ wrapper.
  template <>
  struct CppType<VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM>
  {
    using Type = PhysicalDeviceShaderCoreBuiltinsFeaturesARM;
  };
#endif

  // Trait mapping the StructureType enum value to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM>
  {
    using Type = PhysicalDeviceShaderCoreBuiltinsFeaturesARM;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM.html
  struct PhysicalDeviceShaderCoreBuiltinsPropertiesARM
  {
    using NativeType = VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM;

    // Structure-chain metadata: at most one instance allowed in a pNext chain;
    // structureType is the sType value this struct must carry.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer below.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreBuiltinsPropertiesARM( uint64_t shaderCoreMask_     = {},
                                                                        uint32_t shaderCoreCount_    = {},
                                                                        uint32_t shaderWarpsPerCore_ = {},
                                                                        void *   pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderCoreMask{ shaderCoreMask_ }
      , shaderCoreCount{ shaderCoreCount_ }
      , shaderWarpsPerCore{ shaderWarpsPerCore_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderCoreBuiltinsPropertiesARM( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; wrapper and C struct are layout-compatible.
    PhysicalDeviceShaderCoreBuiltinsPropertiesARM( VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderCoreBuiltinsPropertiesARM( *reinterpret_cast<PhysicalDeviceShaderCoreBuiltinsPropertiesARM const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderCoreBuiltinsPropertiesARM & operator=( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct.  Note: no member setters are generated for this properties struct.
    PhysicalDeviceShaderCoreBuiltinsPropertiesARM & operator=( VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderCoreBuiltinsPropertiesARM const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C struct (reference and pointer, const and non-const).
    operator VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM *>( this );
    }

    operator VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM *>( this );
    }

    operator VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM *>( this );
    }

    operator VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references for generic tuple-wise operations.
    std::tuple<StructureType const &, void * const &, uint64_t const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderCoreMask, shaderCoreCount, shaderWarpsPerCore );
    }
#endif

    // Member-wise comparison; pNext is compared as a raw pointer (shallow).
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreMask == rhs.shaderCoreMask ) && ( shaderCoreCount == rhs.shaderCoreCount ) &&
             ( shaderWarpsPerCore == rhs.shaderWarpsPerCore );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order/types mirror the C struct; required by the reinterpret_cast conversions above.
    StructureType sType              = StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM;
    void *        pNext              = {};
    uint64_t      shaderCoreMask     = {};
    uint32_t      shaderCoreCount    = {};
    uint32_t      shaderWarpsPerCore = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C struct type to its C++ wrapper.
  template <>
  struct CppType<VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM>
  {
    using Type = PhysicalDeviceShaderCoreBuiltinsPropertiesARM;
  };
#endif

  // Trait mapping the StructureType enum value to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM>
  {
    using Type = PhysicalDeviceShaderCoreBuiltinsPropertiesARM;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderCoreProperties2AMD, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderCoreProperties2AMD.html
  struct PhysicalDeviceShaderCoreProperties2AMD
  {
    using NativeType = VkPhysicalDeviceShaderCoreProperties2AMD;

    // Structure-chain metadata: at most one instance allowed in a pNext chain;
    // structureType is the sType value this struct must carry.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer below.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( ShaderCorePropertiesFlagsAMD shaderCoreFeatures_     = {},
                                                                 uint32_t                     activeComputeUnitCount_ = {},
                                                                 void *                       pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderCoreFeatures{ shaderCoreFeatures_ }
      , activeComputeUnitCount{ activeComputeUnitCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; wrapper and C struct are layout-compatible.
    PhysicalDeviceShaderCoreProperties2AMD( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderCoreProperties2AMD( *reinterpret_cast<PhysicalDeviceShaderCoreProperties2AMD const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderCoreProperties2AMD & operator=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct.  Note: no member setters are generated for this properties struct.
    PhysicalDeviceShaderCoreProperties2AMD & operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderCoreProperties2AMD const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C struct (reference and pointer, const and non-const).
    operator VkPhysicalDeviceShaderCoreProperties2AMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderCoreProperties2AMD *>( this );
    }

    operator VkPhysicalDeviceShaderCoreProperties2AMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderCoreProperties2AMD *>( this );
    }

    operator VkPhysicalDeviceShaderCoreProperties2AMD const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderCoreProperties2AMD *>( this );
    }

    operator VkPhysicalDeviceShaderCoreProperties2AMD *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderCoreProperties2AMD *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references for generic tuple-wise operations.
    std::tuple<StructureType const &, void * const &, ShaderCorePropertiesFlagsAMD const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderCoreFeatures, activeComputeUnitCount );
    }
#endif

    // Member-wise comparison; pNext is compared as a raw pointer (shallow).
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderCoreProperties2AMD const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreFeatures == rhs.shaderCoreFeatures ) &&
             ( activeComputeUnitCount == rhs.activeComputeUnitCount );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order/types mirror the C struct; required by the reinterpret_cast conversions above.
    StructureType                sType                  = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
    void *                       pNext                  = {};
    ShaderCorePropertiesFlagsAMD shaderCoreFeatures     = {};
    uint32_t                     activeComputeUnitCount = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C struct type to its C++ wrapper.
  template <>
  struct CppType<VkPhysicalDeviceShaderCoreProperties2AMD>
  {
    using Type = PhysicalDeviceShaderCoreProperties2AMD;
  };
#endif

  // Trait mapping the StructureType enum value to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCoreProperties2AMD>
  {
    using Type = PhysicalDeviceShaderCoreProperties2AMD;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderCorePropertiesAMD, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderCorePropertiesAMD.html
  struct PhysicalDeviceShaderCorePropertiesAMD
  {
    using NativeType = VkPhysicalDeviceShaderCorePropertiesAMD;

    // Structure-chain metadata: at most one instance allowed in a pNext chain;
    // structureType is the sType value this struct must carry.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer below.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( uint32_t shaderEngineCount_          = {},
                                                                uint32_t shaderArraysPerEngineCount_ = {},
                                                                uint32_t computeUnitsPerShaderArray_ = {},
                                                                uint32_t simdPerComputeUnit_         = {},
                                                                uint32_t wavefrontsPerSimd_          = {},
                                                                uint32_t wavefrontSize_              = {},
                                                                uint32_t sgprsPerSimd_               = {},
                                                                uint32_t minSgprAllocation_          = {},
                                                                uint32_t maxSgprAllocation_          = {},
                                                                uint32_t sgprAllocationGranularity_  = {},
                                                                uint32_t vgprsPerSimd_               = {},
                                                                uint32_t minVgprAllocation_          = {},
                                                                uint32_t maxVgprAllocation_          = {},
                                                                uint32_t vgprAllocationGranularity_  = {},
                                                                void *   pNext_                      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderEngineCount{ shaderEngineCount_ }
      , shaderArraysPerEngineCount{ shaderArraysPerEngineCount_ }
      , computeUnitsPerShaderArray{ computeUnitsPerShaderArray_ }
      , simdPerComputeUnit{ simdPerComputeUnit_ }
      , wavefrontsPerSimd{ wavefrontsPerSimd_ }
      , wavefrontSize{ wavefrontSize_ }
      , sgprsPerSimd{ sgprsPerSimd_ }
      , minSgprAllocation{ minSgprAllocation_ }
      , maxSgprAllocation{ maxSgprAllocation_ }
      , sgprAllocationGranularity{ sgprAllocationGranularity_ }
      , vgprsPerSimd{ vgprsPerSimd_ }
      , minVgprAllocation{ minVgprAllocation_ }
      , maxVgprAllocation{ maxVgprAllocation_ }
      , vgprAllocationGranularity{ vgprAllocationGranularity_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; wrapper and C struct are layout-compatible.
    PhysicalDeviceShaderCorePropertiesAMD( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderCorePropertiesAMD( *reinterpret_cast<PhysicalDeviceShaderCorePropertiesAMD const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderCorePropertiesAMD & operator=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct.  Note: no member setters are generated for this properties struct.
    PhysicalDeviceShaderCorePropertiesAMD & operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderCorePropertiesAMD const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C struct (reference and pointer, const and non-const).
    operator VkPhysicalDeviceShaderCorePropertiesAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderCorePropertiesAMD *>( this );
    }

    operator VkPhysicalDeviceShaderCorePropertiesAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderCorePropertiesAMD *>( this );
    }

    operator VkPhysicalDeviceShaderCorePropertiesAMD const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderCorePropertiesAMD *>( this );
    }

    operator VkPhysicalDeviceShaderCorePropertiesAMD *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderCorePropertiesAMD *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references for generic tuple-wise operations.
    std::tuple<StructureType const &,
               void * const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       shaderEngineCount,
                       shaderArraysPerEngineCount,
                       computeUnitsPerShaderArray,
                       simdPerComputeUnit,
                       wavefrontsPerSimd,
                       wavefrontSize,
                       sgprsPerSimd,
                       minSgprAllocation,
                       maxSgprAllocation,
                       sgprAllocationGranularity,
                       vgprsPerSimd,
                       minVgprAllocation,
                       maxVgprAllocation,
                       vgprAllocationGranularity );
    }
#endif

    // Member-wise comparison; pNext is compared as a raw pointer (shallow).
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderCorePropertiesAMD const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEngineCount == rhs.shaderEngineCount ) &&
             ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount ) && ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray ) &&
             ( simdPerComputeUnit == rhs.simdPerComputeUnit ) && ( wavefrontsPerSimd == rhs.wavefrontsPerSimd ) && ( wavefrontSize == rhs.wavefrontSize ) &&
             ( sgprsPerSimd == rhs.sgprsPerSimd ) && ( minSgprAllocation == rhs.minSgprAllocation ) && ( maxSgprAllocation == rhs.maxSgprAllocation ) &&
             ( sgprAllocationGranularity == rhs.sgprAllocationGranularity ) && ( vgprsPerSimd == rhs.vgprsPerSimd ) &&
             ( minVgprAllocation == rhs.minVgprAllocation ) && ( maxVgprAllocation == rhs.maxVgprAllocation ) &&
             ( vgprAllocationGranularity == rhs.vgprAllocationGranularity );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order/types mirror the C struct; required by the reinterpret_cast conversions above.
    StructureType sType                      = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
    void *        pNext                      = {};
    uint32_t      shaderEngineCount          = {};
    uint32_t      shaderArraysPerEngineCount = {};
    uint32_t      computeUnitsPerShaderArray = {};
    uint32_t      simdPerComputeUnit         = {};
    uint32_t      wavefrontsPerSimd          = {};
    uint32_t      wavefrontSize              = {};
    uint32_t      sgprsPerSimd               = {};
    uint32_t      minSgprAllocation          = {};
    uint32_t      maxSgprAllocation          = {};
    uint32_t      sgprAllocationGranularity  = {};
    uint32_t      vgprsPerSimd               = {};
    uint32_t      minVgprAllocation          = {};
    uint32_t      maxVgprAllocation          = {};
    uint32_t      vgprAllocationGranularity  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C struct type to its C++ wrapper.
  template <>
  struct CppType<VkPhysicalDeviceShaderCorePropertiesAMD>
  {
    using Type = PhysicalDeviceShaderCorePropertiesAMD;
  };
#endif

  // Trait mapping the StructureType enum value to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCorePropertiesAMD>
  {
    using Type = PhysicalDeviceShaderCorePropertiesAMD;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderCorePropertiesARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderCorePropertiesARM.html
  struct PhysicalDeviceShaderCorePropertiesARM
  {
    using NativeType = VkPhysicalDeviceShaderCorePropertiesARM;

    // Structure-chain metadata: at most one instance allowed in a pNext chain;
    // structureType is the sType value this struct must carry.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderCorePropertiesARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer below.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesARM( uint32_t pixelRate_ = {},
                                                                uint32_t texelRate_ = {},
                                                                uint32_t fmaRate_   = {},
                                                                void *   pNext_     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pixelRate{ pixelRate_ }
      , texelRate{ texelRate_ }
      , fmaRate{ fmaRate_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesARM( PhysicalDeviceShaderCorePropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; wrapper and C struct are layout-compatible.
    PhysicalDeviceShaderCorePropertiesARM( VkPhysicalDeviceShaderCorePropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderCorePropertiesARM( *reinterpret_cast<PhysicalDeviceShaderCorePropertiesARM const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderCorePropertiesARM & operator=( PhysicalDeviceShaderCorePropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct.  Note: no member setters are generated for this properties struct.
    PhysicalDeviceShaderCorePropertiesARM & operator=( VkPhysicalDeviceShaderCorePropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderCorePropertiesARM const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C struct (reference and pointer, const and non-const).
    operator VkPhysicalDeviceShaderCorePropertiesARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderCorePropertiesARM *>( this );
    }

    operator VkPhysicalDeviceShaderCorePropertiesARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderCorePropertiesARM *>( this );
    }

    operator VkPhysicalDeviceShaderCorePropertiesARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderCorePropertiesARM *>( this );
    }

    operator VkPhysicalDeviceShaderCorePropertiesARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderCorePropertiesARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references for generic tuple-wise operations.
    std::tuple<StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pixelRate, texelRate, fmaRate );
    }
#endif

    // Member-wise comparison; pNext is compared as a raw pointer (shallow).
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderCorePropertiesARM const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderCorePropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pixelRate == rhs.pixelRate ) && ( texelRate == rhs.texelRate ) && ( fmaRate == rhs.fmaRate );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderCorePropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order/types mirror the C struct; required by the reinterpret_cast conversions above.
    StructureType sType     = StructureType::ePhysicalDeviceShaderCorePropertiesARM;
    void *        pNext     = {};
    uint32_t      pixelRate = {};
    uint32_t      texelRate = {};
    uint32_t      fmaRate   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C struct type to its C++ wrapper.
  template <>
  struct CppType<VkPhysicalDeviceShaderCorePropertiesARM>
  {
    using Type = PhysicalDeviceShaderCorePropertiesARM;
  };
#endif

  // Trait mapping the StructureType enum value to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCorePropertiesARM>
  {
    using Type = PhysicalDeviceShaderCorePropertiesARM;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.html
  struct PhysicalDeviceShaderDemoteToHelperInvocationFeatures
  {
    using NativeType = VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures;

    // Structure-chain metadata: at most one instance allowed in a pNext chain;
    // structureType is the sType value this struct must carry.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer below.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeatures( Bool32 shaderDemoteToHelperInvocation_ = {},
                                                                               void * pNext_                          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderDemoteToHelperInvocation{ shaderDemoteToHelperInvocation_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderDemoteToHelperInvocationFeatures( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; wrapper and C struct are layout-compatible.
    PhysicalDeviceShaderDemoteToHelperInvocationFeatures( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderDemoteToHelperInvocationFeatures( *reinterpret_cast<PhysicalDeviceShaderDemoteToHelperInvocationFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderDemoteToHelperInvocationFeatures &
      operator=( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility reasoning as the converting constructor).
    PhysicalDeviceShaderDemoteToHelperInvocationFeatures & operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderDemoteToHelperInvocationFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures &
      setShaderDemoteToHelperInvocation( Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (reference and pointer, const and non-const).
    operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references for generic tuple-wise operations.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderDemoteToHelperInvocation );
    }
#endif

    // Member-wise comparison; pNext is compared as a raw pointer (shallow).
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order/types mirror the C struct; required by the reinterpret_cast conversions above.
    StructureType sType                          = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures;
    void *        pNext                          = {};
    Bool32        shaderDemoteToHelperInvocation = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C struct type to its C++ wrapper.
  template <>
  struct CppType<VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures>
  {
    using Type = PhysicalDeviceShaderDemoteToHelperInvocationFeatures;
  };
#endif

  // Trait mapping the StructureType enum value to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures>
  {
    using Type = PhysicalDeviceShaderDemoteToHelperInvocationFeatures;
  };

  // Alias kept so code written against the EXT-suffixed name keeps compiling.
  using PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = PhysicalDeviceShaderDemoteToHelperInvocationFeatures;

  // wrapper struct for struct VkPhysicalDeviceShaderDrawParametersFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderDrawParametersFeatures.html
  struct PhysicalDeviceShaderDrawParametersFeatures
  {
    // The equivalent C struct. This wrapper must stay layout-identical to it: all
    // reinterpret_casts below rely on that equivalence.
    using NativeType = VkPhysicalDeviceShaderDrawParametersFeatures;

    // Chain metadata: the StructureType tag for this struct; allowDuplicate is
    // consumed by vulkan.hpp's structure-chain machinery (defined elsewhere).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and not a parameter.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( Bool32 shaderDrawParameters_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderDrawParameters{ shaderDrawParameters_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the raw C struct by reinterpreting it as the (layout-identical) wrapper.
    PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderDrawParametersFeatures( *reinterpret_cast<PhysicalDeviceShaderDrawParametersFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderDrawParametersFeatures & operator=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the raw C struct, via the same reinterpret_cast equivalence.
    PhysicalDeviceShaderDrawParametersFeatures & operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderDrawParametersFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures & setShaderDrawParameters( Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderDrawParameters = shaderDrawParameters_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (const/non-const reference and pointer),
    // so the wrapper can be handed directly to the C API.
    operator VkPhysicalDeviceShaderDrawParametersFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderDrawParametersFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderDrawParametersFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderDrawParametersFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderDrawParametersFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderDrawParametersFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to every member, used by the comparison operators below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderDrawParameters );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison provides member-wise ==, !=, <, etc.
    auto operator<=>( PhysicalDeviceShaderDrawParametersFeatures const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a pointer, not deeply.
    bool operator==( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderDrawParameters == rhs.shaderDrawParameters );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct's order; sType's default must stay unchanged, as it
    // identifies this struct when it is linked into a pNext chain.
    StructureType sType                = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
    void *        pNext                = {};
    Bool32        shaderDrawParameters = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API struct to its C++ wrapper type (trait available on C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceShaderDrawParametersFeatures>
  {
    using Type = PhysicalDeviceShaderDrawParametersFeatures;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type that carries it.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderDrawParametersFeatures>
  {
    using Type = PhysicalDeviceShaderDrawParametersFeatures;
  };

  // Backward-compatibility alias for the older (singular "Parameter") name of this struct.
  using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures;

  // wrapper struct for struct VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD.html
  struct PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD
  {
    // The equivalent C struct. This wrapper must stay layout-identical to it: all
    // reinterpret_casts below rely on that equivalence.
    using NativeType = VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;

    // Chain metadata: the StructureType tag for this struct; allowDuplicate is
    // consumed by vulkan.hpp's structure-chain machinery (defined elsewhere).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and not a parameter.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( Bool32 shaderEarlyAndLateFragmentTests_ = {},
                                                                                   void * pNext_                           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderEarlyAndLateFragmentTests{ shaderEarlyAndLateFragmentTests_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // Construct from the raw C struct by reinterpreting it as the (layout-identical) wrapper.
    PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( *reinterpret_cast<PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD &
      operator=( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the raw C struct, via the same reinterpret_cast equivalence.
    PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD &
      operator=( VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD &
      setShaderEarlyAndLateFragmentTests( Bool32 shaderEarlyAndLateFragmentTests_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderEarlyAndLateFragmentTests = shaderEarlyAndLateFragmentTests_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (const/non-const reference and pointer),
    // so the wrapper can be handed directly to the C API.
    operator VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD *>( this );
    }

    operator VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD *>( this );
    }

    operator VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD *>( this );
    }

    operator VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to every member, used by the comparison operators below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderEarlyAndLateFragmentTests );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison provides member-wise ==, !=, <, etc.
    auto operator<=>( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a pointer, not deeply.
    bool operator==( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEarlyAndLateFragmentTests == rhs.shaderEarlyAndLateFragmentTests );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct's order; sType's default must stay unchanged, as it
    // identifies this struct when it is linked into a pNext chain.
    StructureType sType                           = StructureType::ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;
    void *        pNext                           = {};
    Bool32        shaderEarlyAndLateFragmentTests = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API struct to its C++ wrapper type (trait available on C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD>
  {
    using Type = PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type that carries it.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD>
  {
    using Type = PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;
  };

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  // wrapper struct for struct VkPhysicalDeviceShaderEnqueueFeaturesAMDX, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderEnqueueFeaturesAMDX.html
  // Only compiled when beta (provisional) extensions are explicitly enabled.
  struct PhysicalDeviceShaderEnqueueFeaturesAMDX
  {
    // The equivalent C struct. This wrapper must stay layout-identical to it: all
    // reinterpret_casts below rely on that equivalence.
    using NativeType = VkPhysicalDeviceShaderEnqueueFeaturesAMDX;

    // Chain metadata: the StructureType tag for this struct; allowDuplicate is
    // consumed by vulkan.hpp's structure-chain machinery (defined elsewhere).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and not a parameter.
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderEnqueueFeaturesAMDX( Bool32 shaderEnqueue_ = {}, Bool32 shaderMeshEnqueue_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderEnqueue{ shaderEnqueue_ }
      , shaderMeshEnqueue{ shaderMeshEnqueue_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueueFeaturesAMDX( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the raw C struct by reinterpreting it as the (layout-identical) wrapper.
    PhysicalDeviceShaderEnqueueFeaturesAMDX( VkPhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderEnqueueFeaturesAMDX( *reinterpret_cast<PhysicalDeviceShaderEnqueueFeaturesAMDX const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderEnqueueFeaturesAMDX & operator=( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the raw C struct, via the same reinterpret_cast equivalence.
    PhysicalDeviceShaderEnqueueFeaturesAMDX & operator=( VkPhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderEnqueueFeaturesAMDX const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueueFeaturesAMDX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueueFeaturesAMDX & setShaderEnqueue( Bool32 shaderEnqueue_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderEnqueue = shaderEnqueue_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueueFeaturesAMDX & setShaderMeshEnqueue( Bool32 shaderMeshEnqueue_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderMeshEnqueue = shaderMeshEnqueue_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (const/non-const reference and pointer),
    // so the wrapper can be handed directly to the C API.
    operator VkPhysicalDeviceShaderEnqueueFeaturesAMDX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderEnqueueFeaturesAMDX *>( this );
    }

    operator VkPhysicalDeviceShaderEnqueueFeaturesAMDX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderEnqueueFeaturesAMDX *>( this );
    }

    operator VkPhysicalDeviceShaderEnqueueFeaturesAMDX const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderEnqueueFeaturesAMDX *>( this );
    }

    operator VkPhysicalDeviceShaderEnqueueFeaturesAMDX *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderEnqueueFeaturesAMDX *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to every member, used by the comparison operators below.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderEnqueue, shaderMeshEnqueue );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison provides member-wise ==, !=, <, etc.
    auto operator<=>( PhysicalDeviceShaderEnqueueFeaturesAMDX const & ) const = default;
#  else
    // Member-wise equality; pNext is compared as a pointer, not deeply.
    bool operator==( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEnqueue == rhs.shaderEnqueue ) && ( shaderMeshEnqueue == rhs.shaderMeshEnqueue );
#    endif
    }

    bool operator!=( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror the C struct's order; sType's default must stay unchanged, as it
    // identifies this struct when it is linked into a pNext chain.
    StructureType sType             = StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX;
    void *        pNext             = {};
    Bool32        shaderEnqueue     = {};
    Bool32        shaderMeshEnqueue = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API struct to its C++ wrapper type (trait available on C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceShaderEnqueueFeaturesAMDX>
  {
    using Type = PhysicalDeviceShaderEnqueueFeaturesAMDX;
  };
#  endif

  // Maps the StructureType enumerant back to the wrapper type that carries it.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX>
  {
    using Type = PhysicalDeviceShaderEnqueueFeaturesAMDX;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  // wrapper struct for struct VkPhysicalDeviceShaderEnqueuePropertiesAMDX, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderEnqueuePropertiesAMDX.html
  // Only compiled when beta (provisional) extensions are explicitly enabled. This is a
  // properties (query-output) struct, so no member setters are generated for it.
  struct PhysicalDeviceShaderEnqueuePropertiesAMDX
  {
    // The equivalent C struct. This wrapper must stay layout-identical to it: all
    // reinterpret_casts below rely on that equivalence.
    using NativeType = VkPhysicalDeviceShaderEnqueuePropertiesAMDX;

    // Chain metadata: the StructureType tag for this struct; allowDuplicate is
    // consumed by vulkan.hpp's structure-chain machinery (defined elsewhere).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; CONSTEXPR_14 (not plain CONSTEXPR) because the array member's
    // wrapper requires C++14 relaxed-constexpr rules. sType is fixed by its initializer.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX( uint32_t                        maxExecutionGraphDepth_                 = {},
                                                                       uint32_t                        maxExecutionGraphShaderOutputNodes_     = {},
                                                                       uint32_t                        maxExecutionGraphShaderPayloadSize_     = {},
                                                                       uint32_t                        maxExecutionGraphShaderPayloadCount_    = {},
                                                                       uint32_t                        executionGraphDispatchAddressAlignment_ = {},
                                                                       std::array<uint32_t, 3> const & maxExecutionGraphWorkgroupCount_        = {},
                                                                       uint32_t                        maxExecutionGraphWorkgroups_            = {},
                                                                       void *                          pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxExecutionGraphDepth{ maxExecutionGraphDepth_ }
      , maxExecutionGraphShaderOutputNodes{ maxExecutionGraphShaderOutputNodes_ }
      , maxExecutionGraphShaderPayloadSize{ maxExecutionGraphShaderPayloadSize_ }
      , maxExecutionGraphShaderPayloadCount{ maxExecutionGraphShaderPayloadCount_ }
      , executionGraphDispatchAddressAlignment{ executionGraphDispatchAddressAlignment_ }
      , maxExecutionGraphWorkgroupCount{ maxExecutionGraphWorkgroupCount_ }
      , maxExecutionGraphWorkgroups{ maxExecutionGraphWorkgroups_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the raw C struct by reinterpreting it as the (layout-identical) wrapper.
    PhysicalDeviceShaderEnqueuePropertiesAMDX( VkPhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderEnqueuePropertiesAMDX( *reinterpret_cast<PhysicalDeviceShaderEnqueuePropertiesAMDX const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderEnqueuePropertiesAMDX & operator=( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the raw C struct, via the same reinterpret_cast equivalence.
    PhysicalDeviceShaderEnqueuePropertiesAMDX & operator=( VkPhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderEnqueuePropertiesAMDX const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C struct (const/non-const reference and pointer),
    // so the wrapper can be handed directly to the C API.
    operator VkPhysicalDeviceShaderEnqueuePropertiesAMDX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderEnqueuePropertiesAMDX *>( this );
    }

    operator VkPhysicalDeviceShaderEnqueuePropertiesAMDX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderEnqueuePropertiesAMDX *>( this );
    }

    operator VkPhysicalDeviceShaderEnqueuePropertiesAMDX const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderEnqueuePropertiesAMDX *>( this );
    }

    operator VkPhysicalDeviceShaderEnqueuePropertiesAMDX *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderEnqueuePropertiesAMDX *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to every member, used by the comparison operators below.
    std::tuple<StructureType const &,
               void * const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               ArrayWrapper1D<uint32_t, 3> const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       maxExecutionGraphDepth,
                       maxExecutionGraphShaderOutputNodes,
                       maxExecutionGraphShaderPayloadSize,
                       maxExecutionGraphShaderPayloadCount,
                       executionGraphDispatchAddressAlignment,
                       maxExecutionGraphWorkgroupCount,
                       maxExecutionGraphWorkgroups );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison provides member-wise ==, !=, <, etc.
    auto operator<=>( PhysicalDeviceShaderEnqueuePropertiesAMDX const & ) const = default;
#  else
    // Member-wise equality; pNext is compared as a pointer, not deeply.
    bool operator==( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxExecutionGraphDepth == rhs.maxExecutionGraphDepth ) &&
             ( maxExecutionGraphShaderOutputNodes == rhs.maxExecutionGraphShaderOutputNodes ) &&
             ( maxExecutionGraphShaderPayloadSize == rhs.maxExecutionGraphShaderPayloadSize ) &&
             ( maxExecutionGraphShaderPayloadCount == rhs.maxExecutionGraphShaderPayloadCount ) &&
             ( executionGraphDispatchAddressAlignment == rhs.executionGraphDispatchAddressAlignment ) &&
             ( maxExecutionGraphWorkgroupCount == rhs.maxExecutionGraphWorkgroupCount ) && ( maxExecutionGraphWorkgroups == rhs.maxExecutionGraphWorkgroups );
#    endif
    }

    bool operator!=( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror the C struct's order; sType's default must stay unchanged, as it
    // identifies this struct when it is linked into a pNext chain. The fixed-size C
    // array is wrapped in ArrayWrapper1D (layout-identical to uint32_t[3]).
    StructureType               sType                                  = StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX;
    void *                      pNext                                  = {};
    uint32_t                    maxExecutionGraphDepth                 = {};
    uint32_t                    maxExecutionGraphShaderOutputNodes     = {};
    uint32_t                    maxExecutionGraphShaderPayloadSize     = {};
    uint32_t                    maxExecutionGraphShaderPayloadCount    = {};
    uint32_t                    executionGraphDispatchAddressAlignment = {};
    ArrayWrapper1D<uint32_t, 3> maxExecutionGraphWorkgroupCount        = {};
    uint32_t                    maxExecutionGraphWorkgroups            = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API struct to its C++ wrapper type (trait available on C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceShaderEnqueuePropertiesAMDX>
  {
    using Type = PhysicalDeviceShaderEnqueuePropertiesAMDX;
  };
#  endif

  // Maps the StructureType enumerant back to the wrapper type that carries it.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX>
  {
    using Type = PhysicalDeviceShaderEnqueuePropertiesAMDX;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  // wrapper struct for struct VkPhysicalDeviceShaderExpectAssumeFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderExpectAssumeFeatures.html
  struct PhysicalDeviceShaderExpectAssumeFeatures
  {
    // The equivalent C struct. This wrapper must stay layout-identical to it: all
    // reinterpret_casts below rely on that equivalence.
    using NativeType = VkPhysicalDeviceShaderExpectAssumeFeatures;

    // Chain metadata: the StructureType tag for this struct; allowDuplicate is
    // consumed by vulkan.hpp's structure-chain machinery (defined elsewhere).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderExpectAssumeFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and not a parameter.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderExpectAssumeFeatures( Bool32 shaderExpectAssume_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderExpectAssume{ shaderExpectAssume_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderExpectAssumeFeatures( PhysicalDeviceShaderExpectAssumeFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the raw C struct by reinterpreting it as the (layout-identical) wrapper.
    PhysicalDeviceShaderExpectAssumeFeatures( VkPhysicalDeviceShaderExpectAssumeFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderExpectAssumeFeatures( *reinterpret_cast<PhysicalDeviceShaderExpectAssumeFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderExpectAssumeFeatures & operator=( PhysicalDeviceShaderExpectAssumeFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the raw C struct, via the same reinterpret_cast equivalence.
    PhysicalDeviceShaderExpectAssumeFeatures & operator=( VkPhysicalDeviceShaderExpectAssumeFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderExpectAssumeFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderExpectAssumeFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderExpectAssumeFeatures & setShaderExpectAssume( Bool32 shaderExpectAssume_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderExpectAssume = shaderExpectAssume_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (const/non-const reference and pointer),
    // so the wrapper can be handed directly to the C API.
    operator VkPhysicalDeviceShaderExpectAssumeFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderExpectAssumeFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderExpectAssumeFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderExpectAssumeFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderExpectAssumeFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderExpectAssumeFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderExpectAssumeFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderExpectAssumeFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to every member, used by the comparison operators below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderExpectAssume );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison provides member-wise ==, !=, <, etc.
    auto operator<=>( PhysicalDeviceShaderExpectAssumeFeatures const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a pointer, not deeply.
    bool operator==( PhysicalDeviceShaderExpectAssumeFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderExpectAssume == rhs.shaderExpectAssume );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderExpectAssumeFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct's order; sType's default must stay unchanged, as it
    // identifies this struct when it is linked into a pNext chain.
    StructureType sType              = StructureType::ePhysicalDeviceShaderExpectAssumeFeatures;
    void *        pNext              = {};
    Bool32        shaderExpectAssume = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API struct to its C++ wrapper type (trait available on C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceShaderExpectAssumeFeatures>
  {
    using Type = PhysicalDeviceShaderExpectAssumeFeatures;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type that carries it.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderExpectAssumeFeatures>
  {
    using Type = PhysicalDeviceShaderExpectAssumeFeatures;
  };

  // Backward-compatibility alias for the KHR-suffixed name of this struct.
  using PhysicalDeviceShaderExpectAssumeFeaturesKHR = PhysicalDeviceShaderExpectAssumeFeatures;

  // wrapper struct for struct VkPhysicalDeviceShaderFloat16Int8Features, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderFloat16Int8Features.html
  struct PhysicalDeviceShaderFloat16Int8Features
  {
    // The equivalent C struct. This wrapper must stay layout-identical to it: all
    // reinterpret_casts below rely on that equivalence.
    using NativeType = VkPhysicalDeviceShaderFloat16Int8Features;

    // Chain metadata: the StructureType tag for this struct; allowDuplicate is
    // consumed by vulkan.hpp's structure-chain machinery (defined elsewhere).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderFloat16Int8Features;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and not a parameter.
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderFloat16Int8Features( Bool32 shaderFloat16_ = {}, Bool32 shaderInt8_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderFloat16{ shaderFloat16_ }
      , shaderInt8{ shaderInt8_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the raw C struct by reinterpreting it as the (layout-identical) wrapper.
    PhysicalDeviceShaderFloat16Int8Features( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderFloat16Int8Features( *reinterpret_cast<PhysicalDeviceShaderFloat16Int8Features const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderFloat16Int8Features & operator=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the raw C struct, via the same reinterpret_cast equivalence.
    PhysicalDeviceShaderFloat16Int8Features & operator=( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderFloat16Int8Features const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setShaderFloat16( Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderFloat16 = shaderFloat16_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setShaderInt8( Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderInt8 = shaderInt8_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (const/non-const reference and pointer),
    // so the wrapper can be handed directly to the C API.
    operator VkPhysicalDeviceShaderFloat16Int8Features const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderFloat16Int8Features *>( this );
    }

    operator VkPhysicalDeviceShaderFloat16Int8Features &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderFloat16Int8Features *>( this );
    }

    operator VkPhysicalDeviceShaderFloat16Int8Features const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderFloat16Int8Features *>( this );
    }

    operator VkPhysicalDeviceShaderFloat16Int8Features *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderFloat16Int8Features *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to every member, used by the comparison operators below.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderFloat16, shaderInt8 );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison provides member-wise ==, !=, <, etc.
    auto operator<=>( PhysicalDeviceShaderFloat16Int8Features const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a pointer, not deeply.
    bool operator==( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderFloat16 == rhs.shaderFloat16 ) && ( shaderInt8 == rhs.shaderInt8 );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct's order; sType's default must stay unchanged, as it
    // identifies this struct when it is linked into a pNext chain.
    StructureType sType         = StructureType::ePhysicalDeviceShaderFloat16Int8Features;
    void *        pNext         = {};
    Bool32        shaderFloat16 = {};
    Bool32        shaderInt8    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API struct to its C++ wrapper type (trait available on C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceShaderFloat16Int8Features>
  {
    using Type = PhysicalDeviceShaderFloat16Int8Features;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type that carries it.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderFloat16Int8Features>
  {
    using Type = PhysicalDeviceShaderFloat16Int8Features;
  };

  // Backward-compatibility aliases for the KHR-suffixed names of this struct.
  using PhysicalDeviceFloat16Int8FeaturesKHR       = PhysicalDeviceShaderFloat16Int8Features;
  using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;

  // wrapper struct for struct VkPhysicalDeviceShaderFloat8FeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderFloat8FeaturesEXT.html
  //
  // NOTE: this wrapper is layout-compatible with the native C struct; every
  // conversion below is a zero-cost reinterpret_cast, so the member order and
  // types must mirror VkPhysicalDeviceShaderFloat8FeaturesEXT exactly.
  struct PhysicalDeviceShaderFloat8FeaturesEXT
  {
    using NativeType = VkPhysicalDeviceShaderFloat8FeaturesEXT;

    // May appear at most once in a pNext chain; structureType is the sType
    // value the Vulkan API expects for this structure.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderFloat8FeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its in-class initializer below.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat8FeaturesEXT( Bool32 shaderFloat8_                  = {},
                                                                Bool32 shaderFloat8CooperativeMatrix_ = {},
                                                                void * pNext_                         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderFloat8{ shaderFloat8_ }
      , shaderFloat8CooperativeMatrix{ shaderFloat8CooperativeMatrix_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat8FeaturesEXT( PhysicalDeviceShaderFloat8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (relies on identical layout).
    PhysicalDeviceShaderFloat8FeaturesEXT( VkPhysicalDeviceShaderFloat8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderFloat8FeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderFloat8FeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderFloat8FeaturesEXT & operator=( PhysicalDeviceShaderFloat8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (relies on identical layout).
    PhysicalDeviceShaderFloat8FeaturesEXT & operator=( VkPhysicalDeviceShaderFloat8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderFloat8FeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (builder-style) setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat8FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat8FeaturesEXT & setShaderFloat8( Bool32 shaderFloat8_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderFloat8 = shaderFloat8_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat8FeaturesEXT &
      setShaderFloat8CooperativeMatrix( Bool32 shaderFloat8CooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderFloat8CooperativeMatrix = shaderFloat8CooperativeMatrix_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost views as the native C struct: const/non-const reference and
    // pointer conversions, so the wrapper can be passed straight to the C API.
    operator VkPhysicalDeviceShaderFloat8FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderFloat8FeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderFloat8FeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderFloat8FeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderFloat8FeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderFloat8FeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderFloat8FeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderFloat8FeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, used for generic
    // member-wise comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderFloat8, shaderFloat8CooperativeMatrix );
    }
#endif

    // Defaulted three-way comparison when available; otherwise member-wise
    // equality (pNext is compared as a raw pointer, not deeply).
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderFloat8FeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderFloat8FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderFloat8 == rhs.shaderFloat8 ) &&
             ( shaderFloat8CooperativeMatrix == rhs.shaderFloat8CooperativeMatrix );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderFloat8FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native struct; keep sType first with its fixed value.
    StructureType sType                         = StructureType::ePhysicalDeviceShaderFloat8FeaturesEXT;
    void *        pNext                         = {};
    Bool32        shaderFloat8                  = {};
    Bool32        shaderFloat8CooperativeMatrix = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and up: maps the native C struct type to this C++ wrapper type.
  template <>
  struct CppType<VkPhysicalDeviceShaderFloat8FeaturesEXT>
  {
    using Type = PhysicalDeviceShaderFloat8FeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderFloat8FeaturesEXT>
  {
    using Type = PhysicalDeviceShaderFloat8FeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderFloatControls2Features, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderFloatControls2Features.html
  //
  // NOTE: layout-compatible with the native C struct; all conversions below
  // are zero-cost reinterpret_casts, so the member order/types must mirror it.
  struct PhysicalDeviceShaderFloatControls2Features
  {
    using NativeType = VkPhysicalDeviceShaderFloatControls2Features;

    // May appear at most once in a pNext chain; structureType is the sType
    // value the Vulkan API expects for this structure.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderFloatControls2Features;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its in-class initializer below.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloatControls2Features( Bool32 shaderFloatControls2_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderFloatControls2{ shaderFloatControls2_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloatControls2Features( PhysicalDeviceShaderFloatControls2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (relies on identical layout).
    PhysicalDeviceShaderFloatControls2Features( VkPhysicalDeviceShaderFloatControls2Features const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderFloatControls2Features( *reinterpret_cast<PhysicalDeviceShaderFloatControls2Features const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderFloatControls2Features & operator=( PhysicalDeviceShaderFloatControls2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (relies on identical layout).
    PhysicalDeviceShaderFloatControls2Features & operator=( VkPhysicalDeviceShaderFloatControls2Features const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderFloatControls2Features const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (builder-style) setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloatControls2Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloatControls2Features & setShaderFloatControls2( Bool32 shaderFloatControls2_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderFloatControls2 = shaderFloatControls2_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost views as the native C struct (reference and pointer forms).
    operator VkPhysicalDeviceShaderFloatControls2Features const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderFloatControls2Features *>( this );
    }

    operator VkPhysicalDeviceShaderFloatControls2Features &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderFloatControls2Features *>( this );
    }

    operator VkPhysicalDeviceShaderFloatControls2Features const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderFloatControls2Features *>( this );
    }

    operator VkPhysicalDeviceShaderFloatControls2Features *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderFloatControls2Features *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for generic comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderFloatControls2 );
    }
#endif

    // Defaulted three-way comparison when available; otherwise member-wise
    // equality (pNext is compared as a raw pointer, not deeply).
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderFloatControls2Features const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderFloatControls2Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderFloatControls2 == rhs.shaderFloatControls2 );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderFloatControls2Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native struct; keep sType first with its fixed value.
    StructureType sType                = StructureType::ePhysicalDeviceShaderFloatControls2Features;
    void *        pNext                = {};
    Bool32        shaderFloatControls2 = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and up: maps the native C struct type to this C++ wrapper type.
  template <>
  struct CppType<VkPhysicalDeviceShaderFloatControls2Features>
  {
    using Type = PhysicalDeviceShaderFloatControls2Features;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderFloatControls2Features>
  {
    using Type = PhysicalDeviceShaderFloatControls2Features;
  };

  // KHR-suffixed alias kept for source compatibility with the extension name.
  using PhysicalDeviceShaderFloatControls2FeaturesKHR = PhysicalDeviceShaderFloatControls2Features;

  // wrapper struct for struct VkPhysicalDeviceShaderFmaFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderFmaFeaturesKHR.html
  //
  // NOTE: layout-compatible with the native C struct; all conversions below
  // are zero-cost reinterpret_casts, so the member order/types must mirror it.
  struct PhysicalDeviceShaderFmaFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceShaderFmaFeaturesKHR;

    // May appear at most once in a pNext chain; structureType is the sType
    // value the Vulkan API expects for this structure.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderFmaFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its in-class initializer below.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFmaFeaturesKHR( Bool32 shaderFmaFloat16_ = {},
                                                             Bool32 shaderFmaFloat32_ = {},
                                                             Bool32 shaderFmaFloat64_ = {},
                                                             void * pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderFmaFloat16{ shaderFmaFloat16_ }
      , shaderFmaFloat32{ shaderFmaFloat32_ }
      , shaderFmaFloat64{ shaderFmaFloat64_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFmaFeaturesKHR( PhysicalDeviceShaderFmaFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (relies on identical layout).
    PhysicalDeviceShaderFmaFeaturesKHR( VkPhysicalDeviceShaderFmaFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderFmaFeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderFmaFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderFmaFeaturesKHR & operator=( PhysicalDeviceShaderFmaFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (relies on identical layout).
    PhysicalDeviceShaderFmaFeaturesKHR & operator=( VkPhysicalDeviceShaderFmaFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderFmaFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (builder-style) setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFmaFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFmaFeaturesKHR & setShaderFmaFloat16( Bool32 shaderFmaFloat16_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderFmaFloat16 = shaderFmaFloat16_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFmaFeaturesKHR & setShaderFmaFloat32( Bool32 shaderFmaFloat32_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderFmaFloat32 = shaderFmaFloat32_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFmaFeaturesKHR & setShaderFmaFloat64( Bool32 shaderFmaFloat64_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderFmaFloat64 = shaderFmaFloat64_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost views as the native C struct (reference and pointer forms).
    operator VkPhysicalDeviceShaderFmaFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderFmaFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderFmaFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderFmaFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderFmaFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderFmaFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderFmaFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderFmaFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for generic comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderFmaFloat16, shaderFmaFloat32, shaderFmaFloat64 );
    }
#endif

    // Defaulted three-way comparison when available; otherwise member-wise
    // equality (pNext is compared as a raw pointer, not deeply).
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderFmaFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderFmaFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderFmaFloat16 == rhs.shaderFmaFloat16 ) && ( shaderFmaFloat32 == rhs.shaderFmaFloat32 ) &&
             ( shaderFmaFloat64 == rhs.shaderFmaFloat64 );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderFmaFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native struct; keep sType first with its fixed value.
    StructureType sType            = StructureType::ePhysicalDeviceShaderFmaFeaturesKHR;
    void *        pNext            = {};
    Bool32        shaderFmaFloat16 = {};
    Bool32        shaderFmaFloat32 = {};
    Bool32        shaderFmaFloat64 = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and up: maps the native C struct type to this C++ wrapper type.
  template <>
  struct CppType<VkPhysicalDeviceShaderFmaFeaturesKHR>
  {
    using Type = PhysicalDeviceShaderFmaFeaturesKHR;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderFmaFeaturesKHR>
  {
    using Type = PhysicalDeviceShaderFmaFeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT.html
  //
  // NOTE: layout-compatible with the native C struct; all conversions below
  // are zero-cost reinterpret_casts, so the member order/types must mirror it.
  struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT
  {
    using NativeType = VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT;

    // May appear at most once in a pNext chain; structureType is the sType
    // value the Vulkan API expects for this structure.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its in-class initializer below.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( Bool32 shaderImageInt64Atomics_ = {},
                                                                          Bool32 sparseImageInt64Atomics_ = {},
                                                                          void * pNext_                   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderImageInt64Atomics{ shaderImageInt64Atomics_ }
      , sparseImageInt64Atomics{ sparseImageInt64Atomics_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (relies on identical layout).
    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (relies on identical layout).
    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (builder-style) setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setShaderImageInt64Atomics( Bool32 shaderImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderImageInt64Atomics = shaderImageInt64Atomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setSparseImageInt64Atomics( Bool32 sparseImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseImageInt64Atomics = sparseImageInt64Atomics_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost views as the native C struct (reference and pointer forms).
    operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for generic comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderImageInt64Atomics, sparseImageInt64Atomics );
    }
#endif

    // Defaulted three-way comparison when available; otherwise member-wise
    // equality (pNext is compared as a raw pointer, not deeply).
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderImageInt64Atomics == rhs.shaderImageInt64Atomics ) &&
             ( sparseImageInt64Atomics == rhs.sparseImageInt64Atomics );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native struct; keep sType first with its fixed value.
    StructureType sType                   = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
    void *        pNext                   = {};
    Bool32        shaderImageInt64Atomics = {};
    Bool32        sparseImageInt64Atomics = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and up: maps the native C struct type to this C++ wrapper type.
  template <>
  struct CppType<VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT>
  {
    using Type = PhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT>
  {
    using Type = PhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderImageFootprintFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderImageFootprintFeaturesNV.html
  //
  // NOTE: layout-compatible with the native C struct; all conversions below
  // are zero-cost reinterpret_casts, so the member order/types must mirror it.
  struct PhysicalDeviceShaderImageFootprintFeaturesNV
  {
    using NativeType = VkPhysicalDeviceShaderImageFootprintFeaturesNV;

    // May appear at most once in a pNext chain; structureType is the sType
    // value the Vulkan API expects for this structure.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its in-class initializer below.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( Bool32 imageFootprint_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , imageFootprint{ imageFootprint_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (relies on identical layout).
    PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderImageFootprintFeaturesNV( *reinterpret_cast<PhysicalDeviceShaderImageFootprintFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (relies on identical layout).
    PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderImageFootprintFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (builder-style) setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV & setImageFootprint( Bool32 imageFootprint_ ) VULKAN_HPP_NOEXCEPT
    {
      imageFootprint = imageFootprint_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost views as the native C struct (reference and pointer forms).
    operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderImageFootprintFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceShaderImageFootprintFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderImageFootprintFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderImageFootprintFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceShaderImageFootprintFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderImageFootprintFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for generic comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, imageFootprint );
    }
#endif

    // Defaulted three-way comparison when available; otherwise member-wise
    // equality (pNext is compared as a raw pointer, not deeply).
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderImageFootprintFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageFootprint == rhs.imageFootprint );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native struct; keep sType first with its fixed value.
    StructureType sType          = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;
    void *        pNext          = {};
    Bool32        imageFootprint = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and up: maps the native C struct type to this C++ wrapper type.
  template <>
  struct CppType<VkPhysicalDeviceShaderImageFootprintFeaturesNV>
  {
    using Type = PhysicalDeviceShaderImageFootprintFeaturesNV;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV>
  {
    using Type = PhysicalDeviceShaderImageFootprintFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderIntegerDotProductFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderIntegerDotProductFeatures.html
  //
  // NOTE: layout-compatible with the native C struct; all conversions below
  // are zero-cost reinterpret_casts, so the member order/types must mirror it.
  struct PhysicalDeviceShaderIntegerDotProductFeatures
  {
    using NativeType = VkPhysicalDeviceShaderIntegerDotProductFeatures;

    // May appear at most once in a pNext chain; structureType is the sType
    // value the Vulkan API expects for this structure.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its in-class initializer below.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductFeatures( Bool32 shaderIntegerDotProduct_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderIntegerDotProduct{ shaderIntegerDotProduct_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderIntegerDotProductFeatures( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (relies on identical layout).
    PhysicalDeviceShaderIntegerDotProductFeatures( VkPhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderIntegerDotProductFeatures( *reinterpret_cast<PhysicalDeviceShaderIntegerDotProductFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderIntegerDotProductFeatures & operator=( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (relies on identical layout).
    PhysicalDeviceShaderIntegerDotProductFeatures & operator=( VkPhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderIntegerDotProductFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (builder-style) setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductFeatures & setShaderIntegerDotProduct( Bool32 shaderIntegerDotProduct_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderIntegerDotProduct = shaderIntegerDotProduct_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost views as the native C struct (reference and pointer forms).
    operator VkPhysicalDeviceShaderIntegerDotProductFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerDotProductFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderIntegerDotProductFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderIntegerDotProductFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderIntegerDotProductFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderIntegerDotProductFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderIntegerDotProductFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderIntegerDotProductFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used for generic comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderIntegerDotProduct );
    }
#endif

    // Defaulted three-way comparison when available; otherwise member-wise
    // equality (pNext is compared as a raw pointer, not deeply).
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderIntegerDotProductFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderIntegerDotProduct == rhs.shaderIntegerDotProduct );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native struct; keep sType first with its fixed value.
    StructureType sType                   = StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures;
    void *        pNext                   = {};
    Bool32        shaderIntegerDotProduct = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and up: maps the native C struct type to this C++ wrapper type.
  template <>
  struct CppType<VkPhysicalDeviceShaderIntegerDotProductFeatures>
  {
    using Type = PhysicalDeviceShaderIntegerDotProductFeatures;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures>
  {
    using Type = PhysicalDeviceShaderIntegerDotProductFeatures;
  };

  // KHR-suffixed alias kept for source compatibility with the extension name.
  using PhysicalDeviceShaderIntegerDotProductFeaturesKHR = PhysicalDeviceShaderIntegerDotProductFeatures;

  // wrapper struct for struct VkPhysicalDeviceShaderIntegerDotProductProperties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderIntegerDotProductProperties.html
  struct PhysicalDeviceShaderIntegerDotProductProperties
  {
    using NativeType = VkPhysicalDeviceShaderIntegerDotProductProperties;

    // Compile-time sType of this struct; allowDuplicate presumably tells the structure-chain
    // machinery whether this struct may appear more than once in a pNext chain (consumer not visible here).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderIntegerDotProductProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; every capability flag defaults to zero and pNext to nullptr.
    // Note: pNext is the last parameter but is initialized first, matching member declaration order.
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderIntegerDotProductProperties( Bool32 integerDotProduct8BitUnsignedAccelerated_                                      = {},
                                                       Bool32 integerDotProduct8BitSignedAccelerated_                                        = {},
                                                       Bool32 integerDotProduct8BitMixedSignednessAccelerated_                               = {},
                                                       Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated_                              = {},
                                                       Bool32 integerDotProduct4x8BitPackedSignedAccelerated_                                = {},
                                                       Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated_                       = {},
                                                       Bool32 integerDotProduct16BitUnsignedAccelerated_                                     = {},
                                                       Bool32 integerDotProduct16BitSignedAccelerated_                                       = {},
                                                       Bool32 integerDotProduct16BitMixedSignednessAccelerated_                              = {},
                                                       Bool32 integerDotProduct32BitUnsignedAccelerated_                                     = {},
                                                       Bool32 integerDotProduct32BitSignedAccelerated_                                       = {},
                                                       Bool32 integerDotProduct32BitMixedSignednessAccelerated_                              = {},
                                                       Bool32 integerDotProduct64BitUnsignedAccelerated_                                     = {},
                                                       Bool32 integerDotProduct64BitSignedAccelerated_                                       = {},
                                                       Bool32 integerDotProduct64BitMixedSignednessAccelerated_                              = {},
                                                       Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_                = {},
                                                       Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated_                  = {},
                                                       Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_         = {},
                                                       Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_        = {},
                                                       Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_          = {},
                                                       Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ = {},
                                                       Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_               = {},
                                                       Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated_                 = {},
                                                       Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_        = {},
                                                       Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_               = {},
                                                       Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated_                 = {},
                                                       Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_        = {},
                                                       Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_               = {},
                                                       Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_                 = {},
                                                       Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_        = {},
                                                       void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , integerDotProduct8BitUnsignedAccelerated{ integerDotProduct8BitUnsignedAccelerated_ }
      , integerDotProduct8BitSignedAccelerated{ integerDotProduct8BitSignedAccelerated_ }
      , integerDotProduct8BitMixedSignednessAccelerated{ integerDotProduct8BitMixedSignednessAccelerated_ }
      , integerDotProduct4x8BitPackedUnsignedAccelerated{ integerDotProduct4x8BitPackedUnsignedAccelerated_ }
      , integerDotProduct4x8BitPackedSignedAccelerated{ integerDotProduct4x8BitPackedSignedAccelerated_ }
      , integerDotProduct4x8BitPackedMixedSignednessAccelerated{ integerDotProduct4x8BitPackedMixedSignednessAccelerated_ }
      , integerDotProduct16BitUnsignedAccelerated{ integerDotProduct16BitUnsignedAccelerated_ }
      , integerDotProduct16BitSignedAccelerated{ integerDotProduct16BitSignedAccelerated_ }
      , integerDotProduct16BitMixedSignednessAccelerated{ integerDotProduct16BitMixedSignednessAccelerated_ }
      , integerDotProduct32BitUnsignedAccelerated{ integerDotProduct32BitUnsignedAccelerated_ }
      , integerDotProduct32BitSignedAccelerated{ integerDotProduct32BitSignedAccelerated_ }
      , integerDotProduct32BitMixedSignednessAccelerated{ integerDotProduct32BitMixedSignednessAccelerated_ }
      , integerDotProduct64BitUnsignedAccelerated{ integerDotProduct64BitUnsignedAccelerated_ }
      , integerDotProduct64BitSignedAccelerated{ integerDotProduct64BitSignedAccelerated_ }
      , integerDotProduct64BitMixedSignednessAccelerated{ integerDotProduct64BitMixedSignednessAccelerated_ }
      , integerDotProductAccumulatingSaturating8BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ }
      , integerDotProductAccumulatingSaturating8BitSignedAccelerated{ integerDotProductAccumulatingSaturating8BitSignedAccelerated_ }
      , integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ }
      , integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ }
      , integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ }
      , integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ }
      , integerDotProductAccumulatingSaturating16BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ }
      , integerDotProductAccumulatingSaturating16BitSignedAccelerated{ integerDotProductAccumulatingSaturating16BitSignedAccelerated_ }
      , integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ }
      , integerDotProductAccumulatingSaturating32BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ }
      , integerDotProductAccumulatingSaturating32BitSignedAccelerated{ integerDotProductAccumulatingSaturating32BitSignedAccelerated_ }
      , integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ }
      , integerDotProductAccumulatingSaturating64BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ }
      , integerDotProductAccumulatingSaturating64BitSignedAccelerated{ integerDotProductAccumulatingSaturating64BitSignedAccelerated_ }
      , integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderIntegerDotProductProperties( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct: delegates to the copy constructor through a reinterpret_cast,
    // which relies on this wrapper having the exact memory layout of VkPhysicalDeviceShaderIntegerDotProductProperties.
    PhysicalDeviceShaderIntegerDotProductProperties( VkPhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderIntegerDotProductProperties( *reinterpret_cast<PhysicalDeviceShaderIntegerDotProductProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderIntegerDotProductProperties & operator=( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, again via layout-compatible reinterpretation.
    PhysicalDeviceShaderIntegerDotProductProperties & operator=( VkPhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderIntegerDotProductProperties const *>( &rhs );
      return *this;
    }

    // Reinterpreting reference / pointer conversions to the native C type, allowing the wrapper
    // to be passed directly to C Vulkan entry points. No setters are generated for this struct;
    // presumably because it is an output-only properties query struct — see the registry link above.
    operator VkPhysicalDeviceShaderIntegerDotProductProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerDotProductProperties *>( this );
    }

    operator VkPhysicalDeviceShaderIntegerDotProductProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderIntegerDotProductProperties *>( this );
    }

    operator VkPhysicalDeviceShaderIntegerDotProductProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderIntegerDotProductProperties *>( this );
    }

    operator VkPhysicalDeviceShaderIntegerDotProductProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderIntegerDotProductProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members (sType, pNext, then the 30 capability
    // flags in declaration order); used below to implement comparison via tuple comparison.
    std::tuple<StructureType const &,
               void * const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       integerDotProduct8BitUnsignedAccelerated,
                       integerDotProduct8BitSignedAccelerated,
                       integerDotProduct8BitMixedSignednessAccelerated,
                       integerDotProduct4x8BitPackedUnsignedAccelerated,
                       integerDotProduct4x8BitPackedSignedAccelerated,
                       integerDotProduct4x8BitPackedMixedSignednessAccelerated,
                       integerDotProduct16BitUnsignedAccelerated,
                       integerDotProduct16BitSignedAccelerated,
                       integerDotProduct16BitMixedSignednessAccelerated,
                       integerDotProduct32BitUnsignedAccelerated,
                       integerDotProduct32BitSignedAccelerated,
                       integerDotProduct32BitMixedSignednessAccelerated,
                       integerDotProduct64BitUnsignedAccelerated,
                       integerDotProduct64BitSignedAccelerated,
                       integerDotProduct64BitMixedSignednessAccelerated,
                       integerDotProductAccumulatingSaturating8BitUnsignedAccelerated,
                       integerDotProductAccumulatingSaturating8BitSignedAccelerated,
                       integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated,
                       integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated,
                       integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated,
                       integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated,
                       integerDotProductAccumulatingSaturating16BitUnsignedAccelerated,
                       integerDotProductAccumulatingSaturating16BitSignedAccelerated,
                       integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated,
                       integerDotProductAccumulatingSaturating32BitUnsignedAccelerated,
                       integerDotProductAccumulatingSaturating32BitSignedAccelerated,
                       integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated,
                       integerDotProductAccumulatingSaturating64BitUnsignedAccelerated,
                       integerDotProductAccumulatingSaturating64BitSignedAccelerated,
                       integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated );
    }
#endif

    // Comparison: a defaulted <=> when available, otherwise a member-wise == / != that compares
    // pNext by pointer value (not by chain contents), like all generated structs in this file.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderIntegerDotProductProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( integerDotProduct8BitUnsignedAccelerated == rhs.integerDotProduct8BitUnsignedAccelerated ) &&
             ( integerDotProduct8BitSignedAccelerated == rhs.integerDotProduct8BitSignedAccelerated ) &&
             ( integerDotProduct8BitMixedSignednessAccelerated == rhs.integerDotProduct8BitMixedSignednessAccelerated ) &&
             ( integerDotProduct4x8BitPackedUnsignedAccelerated == rhs.integerDotProduct4x8BitPackedUnsignedAccelerated ) &&
             ( integerDotProduct4x8BitPackedSignedAccelerated == rhs.integerDotProduct4x8BitPackedSignedAccelerated ) &&
             ( integerDotProduct4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated ) &&
             ( integerDotProduct16BitUnsignedAccelerated == rhs.integerDotProduct16BitUnsignedAccelerated ) &&
             ( integerDotProduct16BitSignedAccelerated == rhs.integerDotProduct16BitSignedAccelerated ) &&
             ( integerDotProduct16BitMixedSignednessAccelerated == rhs.integerDotProduct16BitMixedSignednessAccelerated ) &&
             ( integerDotProduct32BitUnsignedAccelerated == rhs.integerDotProduct32BitUnsignedAccelerated ) &&
             ( integerDotProduct32BitSignedAccelerated == rhs.integerDotProduct32BitSignedAccelerated ) &&
             ( integerDotProduct32BitMixedSignednessAccelerated == rhs.integerDotProduct32BitMixedSignednessAccelerated ) &&
             ( integerDotProduct64BitUnsignedAccelerated == rhs.integerDotProduct64BitUnsignedAccelerated ) &&
             ( integerDotProduct64BitSignedAccelerated == rhs.integerDotProduct64BitSignedAccelerated ) &&
             ( integerDotProduct64BitMixedSignednessAccelerated == rhs.integerDotProduct64BitMixedSignednessAccelerated ) &&
             ( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated ) &&
             ( integerDotProductAccumulatingSaturating8BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated ) &&
             ( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ==
               rhs.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ) &&
             ( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ==
               rhs.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ) &&
             ( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ==
               rhs.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ) &&
             ( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ==
               rhs.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ) &&
             ( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated ) &&
             ( integerDotProductAccumulatingSaturating16BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated ) &&
             ( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ==
               rhs.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ) &&
             ( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated ) &&
             ( integerDotProductAccumulatingSaturating32BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated ) &&
             ( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ==
               rhs.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ) &&
             ( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated ) &&
             ( integerDotProductAccumulatingSaturating64BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated ) &&
             ( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ==
               rhs.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceShaderIntegerDotProductProperties field-for-field, in the
    // same order; do not reorder, or the reinterpret_cast conversions above become invalid.
    StructureType sType                                                                  = StructureType::ePhysicalDeviceShaderIntegerDotProductProperties;
    void *        pNext                                                                  = {};
    Bool32        integerDotProduct8BitUnsignedAccelerated                               = {};
    Bool32        integerDotProduct8BitSignedAccelerated                                 = {};
    Bool32        integerDotProduct8BitMixedSignednessAccelerated                        = {};
    Bool32        integerDotProduct4x8BitPackedUnsignedAccelerated                       = {};
    Bool32        integerDotProduct4x8BitPackedSignedAccelerated                         = {};
    Bool32        integerDotProduct4x8BitPackedMixedSignednessAccelerated                = {};
    Bool32        integerDotProduct16BitUnsignedAccelerated                              = {};
    Bool32        integerDotProduct16BitSignedAccelerated                                = {};
    Bool32        integerDotProduct16BitMixedSignednessAccelerated                       = {};
    Bool32        integerDotProduct32BitUnsignedAccelerated                              = {};
    Bool32        integerDotProduct32BitSignedAccelerated                                = {};
    Bool32        integerDotProduct32BitMixedSignednessAccelerated                       = {};
    Bool32        integerDotProduct64BitUnsignedAccelerated                              = {};
    Bool32        integerDotProduct64BitSignedAccelerated                                = {};
    Bool32        integerDotProduct64BitMixedSignednessAccelerated                       = {};
    Bool32        integerDotProductAccumulatingSaturating8BitUnsignedAccelerated         = {};
    Bool32        integerDotProductAccumulatingSaturating8BitSignedAccelerated           = {};
    Bool32        integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated  = {};
    Bool32        integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = {};
    Bool32        integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated   = {};
    Bool32        integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {};
    Bool32        integerDotProductAccumulatingSaturating16BitUnsignedAccelerated               = {};
    Bool32        integerDotProductAccumulatingSaturating16BitSignedAccelerated                 = {};
    Bool32        integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated        = {};
    Bool32        integerDotProductAccumulatingSaturating32BitUnsignedAccelerated               = {};
    Bool32        integerDotProductAccumulatingSaturating32BitSignedAccelerated                 = {};
    Bool32        integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated        = {};
    Bool32        integerDotProductAccumulatingSaturating64BitUnsignedAccelerated               = {};
    Bool32        integerDotProductAccumulatingSaturating64BitSignedAccelerated                 = {};
    Bool32        integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C struct VkPhysicalDeviceShaderIntegerDotProductProperties to its C++ wrapper,
  // so generic code can look up the wrapper via CppType<T>::Type (only available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceShaderIntegerDotProductProperties>
  {
    using Type = PhysicalDeviceShaderIntegerDotProductProperties;
  };
#endif

  // Trait mapping the StructureType enumerant back to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerDotProductProperties>
  {
    using Type = PhysicalDeviceShaderIntegerDotProductProperties;
  };

  // Backward-compatibility alias for the name used while this was still a KHR extension struct.
  using PhysicalDeviceShaderIntegerDotProductPropertiesKHR = PhysicalDeviceShaderIntegerDotProductProperties;

  // wrapper struct for struct VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL.html
  struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL
  {
    using NativeType = VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;

    // Compile-time sType of this struct; allowDuplicate presumably tells the structure-chain
    // machinery whether this struct may appear more than once in a pNext chain (consumer not visible here).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; the feature flag defaults to zero and pNext to nullptr.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( Bool32 shaderIntegerFunctions2_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderIntegerFunctions2{ shaderIntegerFunctions2_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct: delegates to the copy constructor through a reinterpret_cast,
    // relying on identical memory layout between wrapper and native struct.
    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( *reinterpret_cast<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &
      operator=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, again via layout-compatible reinterpretation.
    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this so calls can be fluently chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &
      setShaderIntegerFunctions2( Bool32 shaderIntegerFunctions2_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderIntegerFunctions2 = shaderIntegerFunctions2_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting reference / pointer conversions to the native C type, allowing the wrapper
    // to be passed directly to C Vulkan entry points.
    operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *>( this );
    }

    operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *>( this );
    }

    operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *>( this );
    }

    operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order; backs the == comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderIntegerFunctions2 );
    }
#endif

    // Comparison: a defaulted <=> when available, otherwise a member-wise == / != that compares
    // pNext by pointer value (not by chain contents).
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderIntegerFunctions2 == rhs.shaderIntegerFunctions2 );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native struct field-for-field; do not reorder, or the
    // reinterpret_cast conversions above become invalid.
    StructureType sType                   = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
    void *        pNext                   = {};
    Bool32        shaderIntegerFunctions2 = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C struct VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL to its C++ wrapper,
  // so generic code can look up the wrapper via CppType<T>::Type (only available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>
  {
    using Type = PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
  };
#endif

  // Trait mapping the StructureType enumerant back to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>
  {
    using Type = PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR.html
  struct PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR;

    // Compile-time sType of this struct; allowDuplicate presumably tells the structure-chain
    // machinery whether this struct may appear more than once in a pNext chain (consumer not visible here).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderMaximalReconvergenceFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; the feature flag defaults to zero and pNext to nullptr.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR( Bool32 shaderMaximalReconvergence_ = {},
                                                                              void * pNext_                      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderMaximalReconvergence{ shaderMaximalReconvergence_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR( PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct: delegates to the copy constructor through a reinterpret_cast,
    // relying on identical memory layout between wrapper and native struct.
    PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR( VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR &
      operator=( PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, again via layout-compatible reinterpretation.
    PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR & operator=( VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this so calls can be fluently chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR &
      setShaderMaximalReconvergence( Bool32 shaderMaximalReconvergence_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderMaximalReconvergence = shaderMaximalReconvergence_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting reference / pointer conversions to the native C type, allowing the wrapper
    // to be passed directly to C Vulkan entry points.
    operator VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order; backs the == comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderMaximalReconvergence );
    }
#endif

    // Comparison: a defaulted <=> when available, otherwise a member-wise == / != that compares
    // pNext by pointer value (not by chain contents).
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderMaximalReconvergence == rhs.shaderMaximalReconvergence );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native struct field-for-field; do not reorder, or the
    // reinterpret_cast conversions above become invalid.
    StructureType sType                      = StructureType::ePhysicalDeviceShaderMaximalReconvergenceFeaturesKHR;
    void *        pNext                      = {};
    Bool32        shaderMaximalReconvergence = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C struct VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR to its C++ wrapper,
  // so generic code can look up the wrapper via CppType<T>::Type (only available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR>
  {
    using Type = PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR;
  };
#endif

  // Trait mapping the StructureType enumerant back to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderMaximalReconvergenceFeaturesKHR>
  {
    using Type = PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT.html
  // NOTE: generated code — layout must stay byte-identical to the C struct; the reinterpret_cast interop below depends on it.
  struct PhysicalDeviceShaderModuleIdentifierFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderModuleIdentifierFeaturesEXT( Bool32 shaderModuleIdentifier_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderModuleIdentifier{ shaderModuleIdentifier_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderModuleIdentifierFeaturesEXT( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (wrapper and C struct share an identical layout).
    PhysicalDeviceShaderModuleIdentifierFeaturesEXT( VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderModuleIdentifierFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderModuleIdentifierFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderModuleIdentifierFeaturesEXT & operator=( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (wrapper and C struct share an identical layout).
    PhysicalDeviceShaderModuleIdentifierFeaturesEXT & operator=( VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderModuleIdentifierFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderModuleIdentifierFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderModuleIdentifierFeaturesEXT & setShaderModuleIdentifier( Bool32 shaderModuleIdentifier_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderModuleIdentifier = shaderModuleIdentifier_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer), so the wrapper can be
    // passed wherever the C struct is expected.
    operator VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of const references (used by the reflection-based operator== below).
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderModuleIdentifier );
    }
#endif

    // Defaulted three-way comparison when available; otherwise member-wise == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderModuleIdentifier == rhs.shaderModuleIdentifier );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType defaults to the matching StructureType enum value; other members are zero-initialized.
    StructureType sType                  = StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT;
    void *        pNext                  = {};
    Bool32        shaderModuleIdentifier = {};
  };

  // Trait mapping the native C type to this wrapper (C++20 and later only).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT>
  {
    using Type = PhysicalDeviceShaderModuleIdentifierFeaturesEXT;
  };
#endif

  // Trait mapping the StructureType enum value to this wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT>
  {
    using Type = PhysicalDeviceShaderModuleIdentifierFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT.html
  // NOTE: generated code — layout must stay byte-identical to the C struct; the reinterpret_cast interop below depends on it.
  // This is a read-only properties struct, so no setters are generated.
  struct PhysicalDeviceShaderModuleIdentifierPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14
      PhysicalDeviceShaderModuleIdentifierPropertiesEXT( std::array<uint8_t, VK_UUID_SIZE> const & shaderModuleIdentifierAlgorithmUUID_ = {},
                                                         void *                                    pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderModuleIdentifierAlgorithmUUID{ shaderModuleIdentifierAlgorithmUUID_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14
      PhysicalDeviceShaderModuleIdentifierPropertiesEXT( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (wrapper and C struct share an identical layout).
    PhysicalDeviceShaderModuleIdentifierPropertiesEXT( VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderModuleIdentifierPropertiesEXT( *reinterpret_cast<PhysicalDeviceShaderModuleIdentifierPropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderModuleIdentifierPropertiesEXT &
      operator=( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (wrapper and C struct share an identical layout).
    PhysicalDeviceShaderModuleIdentifierPropertiesEXT & operator=( VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderModuleIdentifierPropertiesEXT const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type (reference and pointer), so the wrapper can be
    // passed wherever the C struct is expected.
    operator VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of const references (used by the reflection-based operator== below).
    std::tuple<StructureType const &, void * const &, ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderModuleIdentifierAlgorithmUUID );
    }
#endif

    // Defaulted three-way comparison when available; otherwise member-wise == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderModuleIdentifierAlgorithmUUID == rhs.shaderModuleIdentifierAlgorithmUUID );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType defaults to the matching StructureType enum value; other members are zero-initialized.
    StructureType                         sType                               = StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT;
    void *                                pNext                               = {};
    ArrayWrapper1D<uint8_t, VK_UUID_SIZE> shaderModuleIdentifierAlgorithmUUID = {};
  };

  // Trait mapping the native C type to this wrapper (C++20 and later only).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT>
  {
    using Type = PhysicalDeviceShaderModuleIdentifierPropertiesEXT;
  };
#endif

  // Trait mapping the StructureType enum value to this wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT>
  {
    using Type = PhysicalDeviceShaderModuleIdentifierPropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderObjectFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderObjectFeaturesEXT.html
  // NOTE: generated code — layout must stay byte-identical to the C struct; the reinterpret_cast interop below depends on it.
  struct PhysicalDeviceShaderObjectFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceShaderObjectFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderObjectFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderObjectFeaturesEXT( Bool32 shaderObject_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderObject{ shaderObject_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderObjectFeaturesEXT( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (wrapper and C struct share an identical layout).
    PhysicalDeviceShaderObjectFeaturesEXT( VkPhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderObjectFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderObjectFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderObjectFeaturesEXT & operator=( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (wrapper and C struct share an identical layout).
    PhysicalDeviceShaderObjectFeaturesEXT & operator=( VkPhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderObjectFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectFeaturesEXT & setShaderObject( Bool32 shaderObject_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderObject = shaderObject_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer), so the wrapper can be
    // passed wherever the C struct is expected.
    operator VkPhysicalDeviceShaderObjectFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderObjectFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderObjectFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderObjectFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderObjectFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderObjectFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderObjectFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderObjectFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of const references (used by the reflection-based operator== below).
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderObject );
    }
#endif

    // Defaulted three-way comparison when available; otherwise member-wise == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderObjectFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderObject == rhs.shaderObject );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType defaults to the matching StructureType enum value; other members are zero-initialized.
    StructureType sType        = StructureType::ePhysicalDeviceShaderObjectFeaturesEXT;
    void *        pNext        = {};
    Bool32        shaderObject = {};
  };

  // Trait mapping the native C type to this wrapper (C++20 and later only).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceShaderObjectFeaturesEXT>
  {
    using Type = PhysicalDeviceShaderObjectFeaturesEXT;
  };
#endif

  // Trait mapping the StructureType enum value to this wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderObjectFeaturesEXT>
  {
    using Type = PhysicalDeviceShaderObjectFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderObjectPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderObjectPropertiesEXT.html
  // NOTE: generated code — layout must stay byte-identical to the C struct; the reinterpret_cast interop below depends on it.
  // This is a read-only properties struct, so no setters are generated.
  struct PhysicalDeviceShaderObjectPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceShaderObjectPropertiesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderObjectPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectPropertiesEXT( std::array<uint8_t, VK_UUID_SIZE> const & shaderBinaryUUID_    = {},
                                                                     uint32_t                                  shaderBinaryVersion_ = {},
                                                                     void *                                    pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderBinaryUUID{ shaderBinaryUUID_ }
      , shaderBinaryVersion{ shaderBinaryVersion_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectPropertiesEXT( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (wrapper and C struct share an identical layout).
    PhysicalDeviceShaderObjectPropertiesEXT( VkPhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderObjectPropertiesEXT( *reinterpret_cast<PhysicalDeviceShaderObjectPropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderObjectPropertiesEXT & operator=( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (wrapper and C struct share an identical layout).
    PhysicalDeviceShaderObjectPropertiesEXT & operator=( VkPhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderObjectPropertiesEXT const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type (reference and pointer), so the wrapper can be
    // passed wherever the C struct is expected.
    operator VkPhysicalDeviceShaderObjectPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderObjectPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderObjectPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderObjectPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderObjectPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderObjectPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderObjectPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderObjectPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of const references (used by the reflection-based operator== below).
    std::tuple<StructureType const &, void * const &, ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderBinaryUUID, shaderBinaryVersion );
    }
#endif

    // Defaulted three-way comparison when available; otherwise member-wise == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderObjectPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBinaryUUID == rhs.shaderBinaryUUID ) &&
             ( shaderBinaryVersion == rhs.shaderBinaryVersion );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType defaults to the matching StructureType enum value; other members are zero-initialized.
    StructureType                         sType               = StructureType::ePhysicalDeviceShaderObjectPropertiesEXT;
    void *                                pNext               = {};
    ArrayWrapper1D<uint8_t, VK_UUID_SIZE> shaderBinaryUUID    = {};
    uint32_t                              shaderBinaryVersion = {};
  };

  // Trait mapping the native C type to this wrapper (C++20 and later only).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceShaderObjectPropertiesEXT>
  {
    using Type = PhysicalDeviceShaderObjectPropertiesEXT;
  };
#endif

  // Trait mapping the StructureType enum value to this wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderObjectPropertiesEXT>
  {
    using Type = PhysicalDeviceShaderObjectPropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderQuadControlFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderQuadControlFeaturesKHR.html
  // NOTE: generated code — layout must stay byte-identical to the C struct; the reinterpret_cast interop below depends on it.
  struct PhysicalDeviceShaderQuadControlFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceShaderQuadControlFeaturesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderQuadControlFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderQuadControlFeaturesKHR( Bool32 shaderQuadControl_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderQuadControl{ shaderQuadControl_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderQuadControlFeaturesKHR( PhysicalDeviceShaderQuadControlFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (wrapper and C struct share an identical layout).
    PhysicalDeviceShaderQuadControlFeaturesKHR( VkPhysicalDeviceShaderQuadControlFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderQuadControlFeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderQuadControlFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderQuadControlFeaturesKHR & operator=( PhysicalDeviceShaderQuadControlFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (wrapper and C struct share an identical layout).
    PhysicalDeviceShaderQuadControlFeaturesKHR & operator=( VkPhysicalDeviceShaderQuadControlFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderQuadControlFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderQuadControlFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderQuadControlFeaturesKHR & setShaderQuadControl( Bool32 shaderQuadControl_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderQuadControl = shaderQuadControl_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer), so the wrapper can be
    // passed wherever the C struct is expected.
    operator VkPhysicalDeviceShaderQuadControlFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderQuadControlFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderQuadControlFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderQuadControlFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderQuadControlFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderQuadControlFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderQuadControlFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderQuadControlFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of const references (used by the reflection-based operator== below).
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderQuadControl );
    }
#endif

    // Defaulted three-way comparison when available; otherwise member-wise == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderQuadControlFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderQuadControlFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderQuadControl == rhs.shaderQuadControl );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderQuadControlFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType defaults to the matching StructureType enum value; other members are zero-initialized.
    StructureType sType             = StructureType::ePhysicalDeviceShaderQuadControlFeaturesKHR;
    void *        pNext             = {};
    Bool32        shaderQuadControl = {};
  };

  // Trait mapping the native C type to this wrapper (C++20 and later only).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceShaderQuadControlFeaturesKHR>
  {
    using Type = PhysicalDeviceShaderQuadControlFeaturesKHR;
  };
#endif

  // Trait mapping the StructureType enum value to this wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderQuadControlFeaturesKHR>
  {
    using Type = PhysicalDeviceShaderQuadControlFeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR.html
  // NOTE: generated code — layout must stay byte-identical to the C struct; the reinterpret_cast interop below depends on it.
  struct PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR( Bool32 shaderRelaxedExtendedInstruction_ = {},
                                                                                    void * pNext_                            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderRelaxedExtendedInstruction{ shaderRelaxedExtendedInstruction_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR( PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (wrapper and C struct share an identical layout).
    PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR( VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR(
          *reinterpret_cast<PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR &
      operator=( PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (wrapper and C struct share an identical layout).
    PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR &
      operator=( VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR &
      setShaderRelaxedExtendedInstruction( Bool32 shaderRelaxedExtendedInstruction_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderRelaxedExtendedInstruction = shaderRelaxedExtendedInstruction_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer), so the wrapper can be
    // passed wherever the C struct is expected.
    operator VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Expose all members as a tuple of const references (used by the reflection-based operator== below).
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderRelaxedExtendedInstruction );
    }
#endif

    // Defaulted three-way comparison when available; otherwise member-wise == / !=.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderRelaxedExtendedInstruction == rhs.shaderRelaxedExtendedInstruction );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType defaults to the matching StructureType enum value; other members are zero-initialized.
    StructureType sType                            = StructureType::ePhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR;
    void *        pNext                            = {};
    Bool32        shaderRelaxedExtendedInstruction = {};
  };

  // Trait mapping the native C type to this wrapper (C++20 and later only).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR>
  {
    using Type = PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR;
  };
#endif

  // Trait mapping the StructureType enum value to this wrapper.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR>
  {
    using Type = PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT.html
  struct PhysicalDeviceShaderReplicatedCompositesFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderReplicatedCompositesFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderReplicatedCompositesFeaturesEXT( Bool32 shaderReplicatedComposites_ = {},
                                                                              void * pNext_                      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderReplicatedComposites{ shaderReplicatedComposites_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderReplicatedCompositesFeaturesEXT( PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderReplicatedCompositesFeaturesEXT( VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderReplicatedCompositesFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderReplicatedCompositesFeaturesEXT &
      operator=( PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceShaderReplicatedCompositesFeaturesEXT & operator=( VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderReplicatedCompositesFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderReplicatedCompositesFeaturesEXT &
      setShaderReplicatedComposites( Bool32 shaderReplicatedComposites_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderReplicatedComposites = shaderReplicatedComposites_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderReplicatedComposites );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & ) const = default;
#else
    // Member-wise equality (including sType and pNext).
    bool operator==( PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      // compare the reflected member tuples
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderReplicatedComposites == rhs.shaderReplicatedComposites );
#  endif
    }

    // Defined as the negation of the member-wise equality above.
    bool operator!=( PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType                      = StructureType::ePhysicalDeviceShaderReplicatedCompositesFeaturesEXT;
    void *        pNext                      = {};
    Bool32        shaderReplicatedComposites = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // map the C struct type to this wrapper (C++20 and newer only)
  template <>
  struct CppType<VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT>
  {
    using Type = PhysicalDeviceShaderReplicatedCompositesFeaturesEXT;
  };
#endif

  // map the sType enumerator back to this wrapper
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderReplicatedCompositesFeaturesEXT>
  {
    using Type = PhysicalDeviceShaderReplicatedCompositesFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderSMBuiltinsFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderSMBuiltinsFeaturesNV.html
  struct PhysicalDeviceShaderSMBuiltinsFeaturesNV
  {
    using NativeType = VkPhysicalDeviceShaderSMBuiltinsFeaturesNV;

    // at most one instance of this struct may appear in a StructureChain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType keeps its in-class initializer; only pNext and the feature flag are set here
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( Bool32 shaderSMBuiltins_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderSMBuiltins{ shaderSMBuiltins_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct; the wrapper is layout-compatible, so a reinterpret_cast suffices
    PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderSMBuiltinsFeaturesNV( *reinterpret_cast<PhysicalDeviceShaderSMBuiltinsFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct via the layout-compatible wrapper
    PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderSMBuiltinsFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent, chainable setters
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV & setShaderSMBuiltins( Bool32 shaderSMBuiltins_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSMBuiltins = shaderSMBuiltins_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit views as the C struct (by reference and by pointer); no copies are made
    operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references over all members, used for the generic comparison below
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderSMBuiltins );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & ) const = default;
#else
    // member-wise equality (including sType and pNext)
    bool operator==( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSMBuiltins == rhs.shaderSMBuiltins );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order mirrors the C struct so the reinterpret_casts above stay valid
    StructureType sType            = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;
    void *        pNext            = {};
    Bool32        shaderSMBuiltins = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // map the C struct type to this wrapper (C++20 and newer only)
  template <>
  struct CppType<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV>
  {
    using Type = PhysicalDeviceShaderSMBuiltinsFeaturesNV;
  };
#endif

  // map the sType enumerator back to this wrapper
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV>
  {
    using Type = PhysicalDeviceShaderSMBuiltinsFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderSMBuiltinsPropertiesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderSMBuiltinsPropertiesNV.html
  struct PhysicalDeviceShaderSMBuiltinsPropertiesNV
  {
    using NativeType = VkPhysicalDeviceShaderSMBuiltinsPropertiesNV;

    // at most one instance of this struct may appear in a StructureChain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType keeps its in-class initializer; only pNext and the property values are set here
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderSMBuiltinsPropertiesNV( uint32_t shaderSMCount_ = {}, uint32_t shaderWarpsPerSM_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderSMCount{ shaderSMCount_ }
      , shaderWarpsPerSM{ shaderWarpsPerSM_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct; the wrapper is layout-compatible, so a reinterpret_cast suffices
    PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderSMBuiltinsPropertiesNV( *reinterpret_cast<PhysicalDeviceShaderSMBuiltinsPropertiesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct via the layout-compatible wrapper
    // note: no member setters are generated for this struct, unlike the feature structs in this file
    PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderSMBuiltinsPropertiesNV const *>( &rhs );
      return *this;
    }

    // implicit views as the C struct (by reference and by pointer); no copies are made
    operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references over all members, used for the generic comparison below
    std::tuple<StructureType const &, void * const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderSMCount, shaderWarpsPerSM );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & ) const = default;
#else
    // member-wise equality (including sType and pNext)
    bool operator==( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSMCount == rhs.shaderSMCount ) && ( shaderWarpsPerSM == rhs.shaderWarpsPerSM );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order mirrors the C struct so the reinterpret_casts above stay valid
    StructureType sType            = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;
    void *        pNext            = {};
    uint32_t      shaderSMCount    = {};
    uint32_t      shaderWarpsPerSM = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // map the C struct type to this wrapper (C++20 and newer only)
  template <>
  struct CppType<VkPhysicalDeviceShaderSMBuiltinsPropertiesNV>
  {
    using Type = PhysicalDeviceShaderSMBuiltinsPropertiesNV;
  };
#endif

  // map the sType enumerator back to this wrapper
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV>
  {
    using Type = PhysicalDeviceShaderSMBuiltinsPropertiesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures.html
  struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures
  {
    using NativeType = VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures;

    // at most one instance of this struct may appear in a StructureChain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType keeps its in-class initializer; only pNext and the feature flag are set here
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( Bool32 shaderSubgroupExtendedTypes_ = {},
                                                                            void * pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderSubgroupExtendedTypes{ shaderSubgroupExtendedTypes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderSubgroupExtendedTypesFeatures( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct; the wrapper is layout-compatible, so a reinterpret_cast suffices
    PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderSubgroupExtendedTypesFeatures( *reinterpret_cast<PhysicalDeviceShaderSubgroupExtendedTypesFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderSubgroupExtendedTypesFeatures &
      operator=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct via the layout-compatible wrapper
    PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderSubgroupExtendedTypesFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent, chainable setters
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures &
      setShaderSubgroupExtendedTypes( Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit views as the C struct (by reference and by pointer); no copies are made
    operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references over all members, used for the generic comparison below
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderSubgroupExtendedTypes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & ) const = default;
#else
    // member-wise equality (including sType and pNext)
    bool operator==( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order mirrors the C struct so the reinterpret_casts above stay valid
    StructureType sType                       = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures;
    void *        pNext                       = {};
    Bool32        shaderSubgroupExtendedTypes = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // map the C struct type to this wrapper (C++20 and newer only)
  template <>
  struct CppType<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures>
  {
    using Type = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
  };
#endif

  // map the sType enumerator back to this wrapper
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures>
  {
    using Type = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
  };

  // alias retained for source compatibility with the KHR extension name
  using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;

  // wrapper struct for struct VkPhysicalDeviceShaderSubgroupRotateFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderSubgroupRotateFeatures.html
  struct PhysicalDeviceShaderSubgroupRotateFeatures
  {
    using NativeType = VkPhysicalDeviceShaderSubgroupRotateFeatures;

    // at most one instance of this struct may appear in a StructureChain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderSubgroupRotateFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType keeps its in-class initializer; only pNext and the two feature flags are set here
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupRotateFeatures( Bool32 shaderSubgroupRotate_          = {},
                                                                     Bool32 shaderSubgroupRotateClustered_ = {},
                                                                     void * pNext_                         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderSubgroupRotate{ shaderSubgroupRotate_ }
      , shaderSubgroupRotateClustered{ shaderSubgroupRotateClustered_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupRotateFeatures( PhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct; the wrapper is layout-compatible, so a reinterpret_cast suffices
    PhysicalDeviceShaderSubgroupRotateFeatures( VkPhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderSubgroupRotateFeatures( *reinterpret_cast<PhysicalDeviceShaderSubgroupRotateFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderSubgroupRotateFeatures & operator=( PhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct via the layout-compatible wrapper
    PhysicalDeviceShaderSubgroupRotateFeatures & operator=( VkPhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderSubgroupRotateFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent, chainable setters
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupRotateFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupRotateFeatures & setShaderSubgroupRotate( Bool32 shaderSubgroupRotate_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSubgroupRotate = shaderSubgroupRotate_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupRotateFeatures &
      setShaderSubgroupRotateClustered( Bool32 shaderSubgroupRotateClustered_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSubgroupRotateClustered = shaderSubgroupRotateClustered_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit views as the C struct (by reference and by pointer); no copies are made
    operator VkPhysicalDeviceShaderSubgroupRotateFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupRotateFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderSubgroupRotateFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupRotateFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderSubgroupRotateFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderSubgroupRotateFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderSubgroupRotateFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderSubgroupRotateFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references over all members, used for the generic comparison below
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderSubgroupRotate, shaderSubgroupRotateClustered );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderSubgroupRotateFeatures const & ) const = default;
#else
    // member-wise equality (including sType and pNext)
    bool operator==( PhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupRotate == rhs.shaderSubgroupRotate ) &&
             ( shaderSubgroupRotateClustered == rhs.shaderSubgroupRotateClustered );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order mirrors the C struct so the reinterpret_casts above stay valid
    StructureType sType                         = StructureType::ePhysicalDeviceShaderSubgroupRotateFeatures;
    void *        pNext                         = {};
    Bool32        shaderSubgroupRotate          = {};
    Bool32        shaderSubgroupRotateClustered = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // map the C struct type to this wrapper (C++20 and newer only)
  template <>
  struct CppType<VkPhysicalDeviceShaderSubgroupRotateFeatures>
  {
    using Type = PhysicalDeviceShaderSubgroupRotateFeatures;
  };
#endif

  // map the sType enumerator back to this wrapper
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSubgroupRotateFeatures>
  {
    using Type = PhysicalDeviceShaderSubgroupRotateFeatures;
  };

  // alias retained for source compatibility with the KHR extension name
  using PhysicalDeviceShaderSubgroupRotateFeaturesKHR = PhysicalDeviceShaderSubgroupRotateFeatures;

  // wrapper struct for struct VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR.html
  struct PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;

    // at most one instance of this struct may appear in a StructureChain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType keeps its in-class initializer; only pNext and the feature flag are set here
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( Bool32 shaderSubgroupUniformControlFlow_ = {},
                                                                                    void * pNext_                            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderSubgroupUniformControlFlow{ shaderSubgroupUniformControlFlow_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct; the wrapper is layout-compatible, so a reinterpret_cast suffices
    PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR(
          *reinterpret_cast<PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &
      operator=( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct via the layout-compatible wrapper
    PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &
      operator=( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent, chainable setters
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &
      setShaderSubgroupUniformControlFlow( Bool32 shaderSubgroupUniformControlFlow_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSubgroupUniformControlFlow = shaderSubgroupUniformControlFlow_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit views as the C struct (by reference and by pointer); no copies are made
    operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references over all members, used for the generic comparison below
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderSubgroupUniformControlFlow );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & ) const = default;
#else
    // member-wise equality (including sType and pNext)
    bool operator==( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupUniformControlFlow == rhs.shaderSubgroupUniformControlFlow );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order mirrors the C struct so the reinterpret_casts above stay valid
    StructureType sType                            = StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
    void *        pNext                            = {};
    Bool32        shaderSubgroupUniformControlFlow = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // map the C struct type to this wrapper (C++20 and newer only)
  template <>
  struct CppType<VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>
  {
    using Type = PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
  };
#endif

  // map the sType enumerator back to this wrapper
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>
  {
    using Type = PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderTerminateInvocationFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderTerminateInvocationFeatures.html
  struct PhysicalDeviceShaderTerminateInvocationFeatures
  {
    using NativeType = VkPhysicalDeviceShaderTerminateInvocationFeatures;

    // at most one instance of this struct may appear in a StructureChain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // sType keeps its in-class initializer; only pNext and the feature flag are set here
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeatures( Bool32 shaderTerminateInvocation_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderTerminateInvocation{ shaderTerminateInvocation_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderTerminateInvocationFeatures( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct; the wrapper is layout-compatible, so a reinterpret_cast suffices
    PhysicalDeviceShaderTerminateInvocationFeatures( VkPhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderTerminateInvocationFeatures( *reinterpret_cast<PhysicalDeviceShaderTerminateInvocationFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderTerminateInvocationFeatures & operator=( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct via the layout-compatible wrapper
    PhysicalDeviceShaderTerminateInvocationFeatures & operator=( VkPhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderTerminateInvocationFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent, chainable setters
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeatures &
      setShaderTerminateInvocation( Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderTerminateInvocation = shaderTerminateInvocation_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit views as the C struct (by reference and by pointer); no copies are made
    operator VkPhysicalDeviceShaderTerminateInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderTerminateInvocationFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderTerminateInvocationFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderTerminateInvocationFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderTerminateInvocationFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderTerminateInvocationFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderTerminateInvocationFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderTerminateInvocationFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references over all members, used for the generic comparison below
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderTerminateInvocation );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderTerminateInvocationFeatures const & ) const = default;
#else
    // member-wise equality (including sType and pNext)
    bool operator==( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderTerminateInvocation == rhs.shaderTerminateInvocation );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order mirrors the C struct so the reinterpret_casts above stay valid
    StructureType sType                     = StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures;
    void *        pNext                     = {};
    Bool32        shaderTerminateInvocation = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // map the C struct type to this wrapper (C++20 and newer only)
  template <>
  struct CppType<VkPhysicalDeviceShaderTerminateInvocationFeatures>
  {
    using Type = PhysicalDeviceShaderTerminateInvocationFeatures;
  };
#endif

  // map the sType enumerator back to this wrapper
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures>
  {
    using Type = PhysicalDeviceShaderTerminateInvocationFeatures;
  };

  // alias retained for source compatibility with the KHR extension name
  using PhysicalDeviceShaderTerminateInvocationFeaturesKHR = PhysicalDeviceShaderTerminateInvocationFeatures;

  // wrapper struct for struct VkPhysicalDeviceShaderTileImageFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderTileImageFeaturesEXT.html
  //
  // NOTE(generated): this wrapper must stay layout-identical to the C struct; the
  // reinterpret_cast conversions below depend on it. Do not reorder, add, or remove members.
  struct PhysicalDeviceShaderTileImageFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceShaderTileImageFeaturesEXT;

    // this structure may appear at most once in a pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed via its member initializer and never passed in
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImageFeaturesEXT( Bool32 shaderTileImageColorReadAccess_   = {},
                                                                   Bool32 shaderTileImageDepthReadAccess_   = {},
                                                                   Bool32 shaderTileImageStencilReadAccess_ = {},
                                                                   void * pNext_                            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderTileImageColorReadAccess{ shaderTileImageColorReadAccess_ }
      , shaderTileImageDepthReadAccess{ shaderTileImageDepthReadAccess_ }
      , shaderTileImageStencilReadAccess{ shaderTileImageStencilReadAccess_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImageFeaturesEXT( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; valid because both types share one layout
    PhysicalDeviceShaderTileImageFeaturesEXT( VkPhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderTileImageFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderTileImageFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderTileImageFeaturesEXT & operator=( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct via the layout-identical C++ wrapper
    PhysicalDeviceShaderTileImageFeaturesEXT & operator=( VkPhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderTileImageFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters, one per mutable member (sType is intentionally not settable)
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT &
      setShaderTileImageColorReadAccess( Bool32 shaderTileImageColorReadAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderTileImageColorReadAccess = shaderTileImageColorReadAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT &
      setShaderTileImageDepthReadAccess( Bool32 shaderTileImageDepthReadAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderTileImageDepthReadAccess = shaderTileImageDepthReadAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT &
      setShaderTileImageStencilReadAccess( Bool32 shaderTileImageStencilReadAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderTileImageStencilReadAccess = shaderTileImageStencilReadAccess_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit, zero-cost conversions to the native C struct (reference and pointer forms)
    operator VkPhysicalDeviceShaderTileImageFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderTileImageFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderTileImageFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderTileImageFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderTileImageFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderTileImageFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderTileImageFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderTileImageFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple-of-references view over all members, used for reflection-based comparison
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderTileImageColorReadAccess, shaderTileImageDepthReadAccess, shaderTileImageStencilReadAccess );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderTileImageFeaturesEXT const & ) const = default;
#else
    // member-wise equality; note that pNext is compared as a raw pointer (no chain deep-compare)
    bool operator==( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderTileImageColorReadAccess == rhs.shaderTileImageColorReadAccess ) &&
             ( shaderTileImageDepthReadAccess == rhs.shaderTileImageDepthReadAccess ) &&
             ( shaderTileImageStencilReadAccess == rhs.shaderTileImageStencilReadAccess );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order and types mirror VkPhysicalDeviceShaderTileImageFeaturesEXT exactly
    StructureType sType                            = StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT;
    void *        pNext                            = {};
    Bool32        shaderTileImageColorReadAccess   = {};
    Bool32        shaderTileImageDepthReadAccess   = {};
    Bool32        shaderTileImageStencilReadAccess = {};
  };

// map the native C type back to this C++ wrapper (used by generic code)
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceShaderTileImageFeaturesEXT>
  {
    using Type = PhysicalDeviceShaderTileImageFeaturesEXT;
  };
#endif

  // map the StructureType enumerant to this C++ wrapper (used by structure chains)
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT>
  {
    using Type = PhysicalDeviceShaderTileImageFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderTileImagePropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderTileImagePropertiesEXT.html
  //
  // NOTE(generated): properties struct (read-only from the application's perspective),
  // hence no setters are generated. Layout must match the C struct exactly for the
  // reinterpret_cast conversions below.
  struct PhysicalDeviceShaderTileImagePropertiesEXT
  {
    using NativeType = VkPhysicalDeviceShaderTileImagePropertiesEXT;

    // this structure may appear at most once in a pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed via its member initializer and never passed in
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImagePropertiesEXT( Bool32 shaderTileImageCoherentReadAccelerated_           = {},
                                                                     Bool32 shaderTileImageReadSampleFromPixelRateInvocation_ = {},
                                                                     Bool32 shaderTileImageReadFromHelperInvocation_          = {},
                                                                     void * pNext_                                            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderTileImageCoherentReadAccelerated{ shaderTileImageCoherentReadAccelerated_ }
      , shaderTileImageReadSampleFromPixelRateInvocation{ shaderTileImageReadSampleFromPixelRateInvocation_ }
      , shaderTileImageReadFromHelperInvocation{ shaderTileImageReadFromHelperInvocation_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImagePropertiesEXT( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; valid because both types share one layout
    PhysicalDeviceShaderTileImagePropertiesEXT( VkPhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderTileImagePropertiesEXT( *reinterpret_cast<PhysicalDeviceShaderTileImagePropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderTileImagePropertiesEXT & operator=( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct via the layout-identical C++ wrapper
    PhysicalDeviceShaderTileImagePropertiesEXT & operator=( VkPhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderTileImagePropertiesEXT const *>( &rhs );
      return *this;
    }

    // implicit, zero-cost conversions to the native C struct (reference and pointer forms)
    operator VkPhysicalDeviceShaderTileImagePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderTileImagePropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderTileImagePropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderTileImagePropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderTileImagePropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderTileImagePropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderTileImagePropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderTileImagePropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple-of-references view over all members, used for reflection-based comparison
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(
        sType, pNext, shaderTileImageCoherentReadAccelerated, shaderTileImageReadSampleFromPixelRateInvocation, shaderTileImageReadFromHelperInvocation );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderTileImagePropertiesEXT const & ) const = default;
#else
    // member-wise equality; note that pNext is compared as a raw pointer (no chain deep-compare)
    bool operator==( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderTileImageCoherentReadAccelerated == rhs.shaderTileImageCoherentReadAccelerated ) &&
             ( shaderTileImageReadSampleFromPixelRateInvocation == rhs.shaderTileImageReadSampleFromPixelRateInvocation ) &&
             ( shaderTileImageReadFromHelperInvocation == rhs.shaderTileImageReadFromHelperInvocation );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order and types mirror VkPhysicalDeviceShaderTileImagePropertiesEXT exactly
    StructureType sType                                            = StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT;
    void *        pNext                                            = {};
    Bool32        shaderTileImageCoherentReadAccelerated           = {};
    Bool32        shaderTileImageReadSampleFromPixelRateInvocation = {};
    Bool32        shaderTileImageReadFromHelperInvocation          = {};
  };

// map the native C type back to this C++ wrapper (used by generic code)
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceShaderTileImagePropertiesEXT>
  {
    using Type = PhysicalDeviceShaderTileImagePropertiesEXT;
  };
#endif

  // map the StructureType enumerant to this C++ wrapper (used by structure chains)
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT>
  {
    using Type = PhysicalDeviceShaderTileImagePropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT.html
  //
  // NOTE(generated): this wrapper must stay layout-identical to the C struct; the
  // reinterpret_cast conversions below depend on it. Do not reorder, add, or remove members.
  struct PhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT;

    // this structure may appear at most once in a pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed via its member initializer and never passed in
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT( Bool32 shaderUniformBufferUnsizedArray_ = {},
                                                                                   void * pNext_                           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderUniformBufferUnsizedArray{ shaderUniformBufferUnsizedArray_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT( PhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; valid because both types share one layout
    PhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT( VkPhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT &
      operator=( PhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct via the layout-identical C++ wrapper
    PhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT &
      operator=( VkPhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters, one per mutable member (sType is intentionally not settable)
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT &
      setShaderUniformBufferUnsizedArray( Bool32 shaderUniformBufferUnsizedArray_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderUniformBufferUnsizedArray = shaderUniformBufferUnsizedArray_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit, zero-cost conversions to the native C struct (reference and pointer forms)
    operator VkPhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple-of-references view over all members, used for reflection-based comparison
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderUniformBufferUnsizedArray );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT const & ) const = default;
#else
    // member-wise equality; note that pNext is compared as a raw pointer (no chain deep-compare)
    bool operator==( PhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderUniformBufferUnsizedArray == rhs.shaderUniformBufferUnsizedArray );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order and types mirror VkPhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT exactly
    StructureType sType                           = StructureType::ePhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT;
    void *        pNext                           = {};
    Bool32        shaderUniformBufferUnsizedArray = {};
  };

// map the native C type back to this C++ wrapper (used by generic code)
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT>
  {
    using Type = PhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT;
  };
#endif

  // map the StructureType enumerant to this C++ wrapper (used by structure chains)
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT>
  {
    using Type = PhysicalDeviceShaderUniformBufferUnsizedArrayFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceShaderUntypedPointersFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderUntypedPointersFeaturesKHR.html
  //
  // NOTE(generated): this wrapper must stay layout-identical to the C struct; the
  // reinterpret_cast conversions below depend on it. Do not reorder, add, or remove members.
  struct PhysicalDeviceShaderUntypedPointersFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceShaderUntypedPointersFeaturesKHR;

    // this structure may appear at most once in a pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShaderUntypedPointersFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed via its member initializer and never passed in
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderUntypedPointersFeaturesKHR( Bool32 shaderUntypedPointers_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderUntypedPointers{ shaderUntypedPointers_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderUntypedPointersFeaturesKHR( PhysicalDeviceShaderUntypedPointersFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; valid because both types share one layout
    PhysicalDeviceShaderUntypedPointersFeaturesKHR( VkPhysicalDeviceShaderUntypedPointersFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderUntypedPointersFeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderUntypedPointersFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceShaderUntypedPointersFeaturesKHR & operator=( PhysicalDeviceShaderUntypedPointersFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct via the layout-identical C++ wrapper
    PhysicalDeviceShaderUntypedPointersFeaturesKHR & operator=( VkPhysicalDeviceShaderUntypedPointersFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShaderUntypedPointersFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters, one per mutable member (sType is intentionally not settable)
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderUntypedPointersFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderUntypedPointersFeaturesKHR & setShaderUntypedPointers( Bool32 shaderUntypedPointers_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderUntypedPointers = shaderUntypedPointers_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit, zero-cost conversions to the native C struct (reference and pointer forms)
    operator VkPhysicalDeviceShaderUntypedPointersFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderUntypedPointersFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderUntypedPointersFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderUntypedPointersFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderUntypedPointersFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShaderUntypedPointersFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderUntypedPointersFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShaderUntypedPointersFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple-of-references view over all members, used for reflection-based comparison
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderUntypedPointers );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderUntypedPointersFeaturesKHR const & ) const = default;
#else
    // member-wise equality; note that pNext is compared as a raw pointer (no chain deep-compare)
    bool operator==( PhysicalDeviceShaderUntypedPointersFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderUntypedPointers == rhs.shaderUntypedPointers );
#  endif
    }

    bool operator!=( PhysicalDeviceShaderUntypedPointersFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order and types mirror VkPhysicalDeviceShaderUntypedPointersFeaturesKHR exactly
    StructureType sType                 = StructureType::ePhysicalDeviceShaderUntypedPointersFeaturesKHR;
    void *        pNext                 = {};
    Bool32        shaderUntypedPointers = {};
  };

// map the native C type back to this C++ wrapper (used by generic code)
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceShaderUntypedPointersFeaturesKHR>
  {
    using Type = PhysicalDeviceShaderUntypedPointersFeaturesKHR;
  };
#endif

  // map the StructureType enumerant to this C++ wrapper (used by structure chains)
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderUntypedPointersFeaturesKHR>
  {
    using Type = PhysicalDeviceShaderUntypedPointersFeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceShadingRateImageFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShadingRateImageFeaturesNV.html
  //
  // NOTE(generated): this wrapper must stay layout-identical to the C struct; the
  // reinterpret_cast conversions below depend on it. Do not reorder, add, or remove members.
  struct PhysicalDeviceShadingRateImageFeaturesNV
  {
    using NativeType = VkPhysicalDeviceShadingRateImageFeaturesNV;

    // this structure may appear at most once in a pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed via its member initializer and never passed in
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( Bool32 shadingRateImage_             = {},
                                                                   Bool32 shadingRateCoarseSampleOrder_ = {},
                                                                   void * pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shadingRateImage{ shadingRateImage_ }
      , shadingRateCoarseSampleOrder{ shadingRateCoarseSampleOrder_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; valid because both types share one layout
    PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShadingRateImageFeaturesNV( *reinterpret_cast<PhysicalDeviceShadingRateImageFeaturesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceShadingRateImageFeaturesNV & operator=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct via the layout-identical C++ wrapper
    PhysicalDeviceShadingRateImageFeaturesNV & operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShadingRateImageFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters, one per mutable member (sType is intentionally not settable)
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateImage( Bool32 shadingRateImage_ ) VULKAN_HPP_NOEXCEPT
    {
      shadingRateImage = shadingRateImage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV &
      setShadingRateCoarseSampleOrder( Bool32 shadingRateCoarseSampleOrder_ ) VULKAN_HPP_NOEXCEPT
    {
      shadingRateCoarseSampleOrder = shadingRateCoarseSampleOrder_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit, zero-cost conversions to the native C struct (reference and pointer forms)
    operator VkPhysicalDeviceShadingRateImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShadingRateImageFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceShadingRateImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShadingRateImageFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceShadingRateImageFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShadingRateImageFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceShadingRateImageFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShadingRateImageFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple-of-references view over all members, used for reflection-based comparison
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shadingRateImage, shadingRateCoarseSampleOrder );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShadingRateImageFeaturesNV const & ) const = default;
#else
    // member-wise equality; note that pNext is compared as a raw pointer (no chain deep-compare)
    bool operator==( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateImage == rhs.shadingRateImage ) &&
             ( shadingRateCoarseSampleOrder == rhs.shadingRateCoarseSampleOrder );
#  endif
    }

    bool operator!=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order and types mirror VkPhysicalDeviceShadingRateImageFeaturesNV exactly
    StructureType sType                        = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
    void *        pNext                        = {};
    Bool32        shadingRateImage             = {};
    Bool32        shadingRateCoarseSampleOrder = {};
  };

// map the native C type back to this C++ wrapper (used by generic code)
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceShadingRateImageFeaturesNV>
  {
    using Type = PhysicalDeviceShadingRateImageFeaturesNV;
  };
#endif

  // map the StructureType enumerant to this C++ wrapper (used by structure chains)
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShadingRateImageFeaturesNV>
  {
    using Type = PhysicalDeviceShadingRateImageFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceShadingRateImagePropertiesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShadingRateImagePropertiesNV.html
  //
  // NOTE(generated): properties struct (read-only from the application's perspective),
  // hence no setters are generated. Layout must match the C struct exactly for the
  // reinterpret_cast conversions below.
  struct PhysicalDeviceShadingRateImagePropertiesNV
  {
    using NativeType = VkPhysicalDeviceShadingRateImagePropertiesNV;

    // this structure may appear at most once in a pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed via its member initializer and never passed in
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( Extent2D shadingRateTexelSize_        = {},
                                                                     uint32_t shadingRatePaletteSize_      = {},
                                                                     uint32_t shadingRateMaxCoarseSamples_ = {},
                                                                     void *   pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shadingRateTexelSize{ shadingRateTexelSize_ }
      , shadingRatePaletteSize{ shadingRatePaletteSize_ }
      , shadingRateMaxCoarseSamples{ shadingRateMaxCoarseSamples_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; valid because both types share one layout
    PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShadingRateImagePropertiesNV( *reinterpret_cast<PhysicalDeviceShadingRateImagePropertiesNV const *>( &rhs ) )
    {
    }

    PhysicalDeviceShadingRateImagePropertiesNV & operator=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct via the layout-identical C++ wrapper
    PhysicalDeviceShadingRateImagePropertiesNV & operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceShadingRateImagePropertiesNV const *>( &rhs );
      return *this;
    }

    // implicit, zero-cost conversions to the native C struct (reference and pointer forms)
    operator VkPhysicalDeviceShadingRateImagePropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShadingRateImagePropertiesNV *>( this );
    }

    operator VkPhysicalDeviceShadingRateImagePropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShadingRateImagePropertiesNV *>( this );
    }

    operator VkPhysicalDeviceShadingRateImagePropertiesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceShadingRateImagePropertiesNV *>( this );
    }

    operator VkPhysicalDeviceShadingRateImagePropertiesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceShadingRateImagePropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple-of-references view over all members, used for reflection-based comparison
    std::tuple<StructureType const &, void * const &, Extent2D const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shadingRateTexelSize, shadingRatePaletteSize, shadingRateMaxCoarseSamples );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShadingRateImagePropertiesNV const & ) const = default;
#else
    // member-wise equality; note that pNext is compared as a raw pointer (no chain deep-compare)
    bool operator==( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateTexelSize == rhs.shadingRateTexelSize ) &&
             ( shadingRatePaletteSize == rhs.shadingRatePaletteSize ) && ( shadingRateMaxCoarseSamples == rhs.shadingRateMaxCoarseSamples );
#  endif
    }

    bool operator!=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order and types mirror VkPhysicalDeviceShadingRateImagePropertiesNV exactly
    StructureType sType                       = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
    void *        pNext                       = {};
    Extent2D      shadingRateTexelSize        = {};
    uint32_t      shadingRatePaletteSize      = {};
    uint32_t      shadingRateMaxCoarseSamples = {};
  };

// map the native C type back to this C++ wrapper (used by generic code)
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPhysicalDeviceShadingRateImagePropertiesNV>
  {
    using Type = PhysicalDeviceShadingRateImagePropertiesNV;
  };
#endif

  // map the StructureType enumerant to this C++ wrapper (used by structure chains)
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShadingRateImagePropertiesNV>
  {
    using Type = PhysicalDeviceShadingRateImagePropertiesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceSparseImageFormatInfo2, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSparseImageFormatInfo2.html
  struct PhysicalDeviceSparseImageFormatInfo2
  {
    using NativeType = VkPhysicalDeviceSparseImageFormatInfo2;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceSparseImageFormatInfo2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( Format              format_  = Format::eUndefined,
                                                               ImageType           type_    = ImageType::e1D,
                                                               SampleCountFlagBits samples_ = SampleCountFlagBits::e1,
                                                               ImageUsageFlags     usage_   = {},
                                                               ImageTiling         tiling_  = ImageTiling::eOptimal,
                                                               const void *        pNext_   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , format{ format_ }
      , type{ type_ }
      , samples{ samples_ }
      , usage{ usage_ }
      , tiling{ tiling_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSparseImageFormatInfo2( *reinterpret_cast<PhysicalDeviceSparseImageFormatInfo2 const *>( &rhs ) )
    {
    }

    PhysicalDeviceSparseImageFormatInfo2 & operator=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceSparseImageFormatInfo2 & operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceSparseImageFormatInfo2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setFormat( Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setType( ImageType type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setSamples( SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
    {
      samples = samples_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setUsage( ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setTiling( ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
    {
      tiling = tiling_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost views of this object as the corresponding C struct, by reference and by pointer.
    operator VkPhysicalDeviceSparseImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( this );
    }

    operator VkPhysicalDeviceSparseImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSparseImageFormatInfo2 *>( this );
    }

    operator VkPhysicalDeviceSparseImageFormatInfo2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( this );
    }

    operator VkPhysicalDeviceSparseImageFormatInfo2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceSparseImageFormatInfo2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references over all members, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               Format const &,
               ImageType const &,
               SampleCountFlagBits const &,
               ImageUsageFlags const &,
               ImageTiling const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, format, type, samples, usage, tiling );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSparseImageFormatInfo2 const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer value — the chain is not followed.
    bool operator==( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) && ( samples == rhs.samples ) &&
             ( usage == rhs.usage ) && ( tiling == rhs.tiling );
#  endif
    }

    bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceSparseImageFormatInfo2 exactly, in declaration order.
    StructureType       sType   = StructureType::ePhysicalDeviceSparseImageFormatInfo2;
    const void *        pNext   = {};
    Format              format  = Format::eUndefined;
    ImageType           type    = ImageType::e1D;
    SampleCountFlagBits samples = SampleCountFlagBits::e1;
    ImageUsageFlags     usage   = {};
    ImageTiling         tiling  = ImageTiling::eOptimal;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API type to its C++ wrapper (specialization only available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceSparseImageFormatInfo2>
  {
    using Type = PhysicalDeviceSparseImageFormatInfo2;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSparseImageFormatInfo2>
  {
    using Type = PhysicalDeviceSparseImageFormatInfo2;
  };

  // Alias for the KHR-suffixed name of the same structure.
  using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2;

  // wrapper struct for struct VkPhysicalDeviceSubgroupProperties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSubgroupProperties.html
  struct PhysicalDeviceSubgroupProperties
  {
    using NativeType = VkPhysicalDeviceSubgroupProperties;

    // Chain metadata: the sType this wrapper reports; NOTE(review): allowDuplicate is
    // consumed by StructureChain validation defined elsewhere in vulkan.hpp.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceSubgroupProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; pNext comes last so the common case needs no arguments.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( uint32_t             subgroupSize_              = {},
                                                           ShaderStageFlags     supportedStages_           = {},
                                                           SubgroupFeatureFlags supportedOperations_       = {},
                                                           Bool32               quadOperationsInAllStages_ = {},
                                                           void *               pNext_                     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , subgroupSize{ subgroupSize_ }
      , supportedStages{ supportedStages_ }
      , supportedOperations{ supportedOperations_ }
      , quadOperationsInAllStages{ quadOperationsInAllStages_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct; relies on layout compatibility with VkPhysicalDeviceSubgroupProperties.
    PhysicalDeviceSubgroupProperties( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSubgroupProperties( *reinterpret_cast<PhysicalDeviceSubgroupProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceSubgroupProperties & operator=( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct; available even when constructors are compiled out.
    PhysicalDeviceSubgroupProperties & operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceSubgroupProperties const *>( &rhs );
      return *this;
    }

    // Zero-cost views of this object as the corresponding C struct, by reference and by pointer.
    operator VkPhysicalDeviceSubgroupProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSubgroupProperties *>( this );
    }

    operator VkPhysicalDeviceSubgroupProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSubgroupProperties *>( this );
    }

    operator VkPhysicalDeviceSubgroupProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceSubgroupProperties *>( this );
    }

    operator VkPhysicalDeviceSubgroupProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceSubgroupProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references over all members, in declaration order.
    std::tuple<StructureType const &, void * const &, uint32_t const &, ShaderStageFlags const &, SubgroupFeatureFlags const &, Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, subgroupSize, supportedStages, supportedOperations, quadOperationsInAllStages );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSubgroupProperties const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer value — the chain is not followed.
    bool operator==( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subgroupSize == rhs.subgroupSize ) && ( supportedStages == rhs.supportedStages ) &&
             ( supportedOperations == rhs.supportedOperations ) && ( quadOperationsInAllStages == rhs.quadOperationsInAllStages );
#  endif
    }

    bool operator!=( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceSubgroupProperties exactly, in declaration order.
    StructureType        sType                     = StructureType::ePhysicalDeviceSubgroupProperties;
    void *               pNext                     = {};
    uint32_t             subgroupSize              = {};
    ShaderStageFlags     supportedStages           = {};
    SubgroupFeatureFlags supportedOperations       = {};
    Bool32               quadOperationsInAllStages = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API type to its C++ wrapper (specialization only available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceSubgroupProperties>
  {
    using Type = PhysicalDeviceSubgroupProperties;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupProperties>
  {
    using Type = PhysicalDeviceSubgroupProperties;
  };

  // wrapper struct for struct VkPhysicalDeviceSubgroupSizeControlFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSubgroupSizeControlFeatures.html
  struct PhysicalDeviceSubgroupSizeControlFeatures
  {
    using NativeType = VkPhysicalDeviceSubgroupSizeControlFeatures;

    // Chain metadata: the sType this wrapper reports; NOTE(review): allowDuplicate is
    // consumed by StructureChain validation defined elsewhere in vulkan.hpp.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceSubgroupSizeControlFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; pNext comes last so the common case needs no arguments.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures( Bool32 subgroupSizeControl_  = {},
                                                                    Bool32 computeFullSubgroups_ = {},
                                                                    void * pNext_                = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , subgroupSizeControl{ subgroupSizeControl_ }
      , computeFullSubgroups{ computeFullSubgroups_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct; relies on layout compatibility with VkPhysicalDeviceSubgroupSizeControlFeatures.
    PhysicalDeviceSubgroupSizeControlFeatures( VkPhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSubgroupSizeControlFeatures( *reinterpret_cast<PhysicalDeviceSubgroupSizeControlFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceSubgroupSizeControlFeatures & operator=( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct; available even when constructors are compiled out.
    PhysicalDeviceSubgroupSizeControlFeatures & operator=( VkPhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceSubgroupSizeControlFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & setSubgroupSizeControl( Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
    {
      subgroupSizeControl = subgroupSizeControl_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & setComputeFullSubgroups( Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
    {
      computeFullSubgroups = computeFullSubgroups_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost views of this object as the corresponding C struct, by reference and by pointer.
    operator VkPhysicalDeviceSubgroupSizeControlFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlFeatures *>( this );
    }

    operator VkPhysicalDeviceSubgroupSizeControlFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlFeatures *>( this );
    }

    operator VkPhysicalDeviceSubgroupSizeControlFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlFeatures *>( this );
    }

    operator VkPhysicalDeviceSubgroupSizeControlFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references over all members, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, subgroupSizeControl, computeFullSubgroups );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSubgroupSizeControlFeatures const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer value — the chain is not followed.
    bool operator==( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subgroupSizeControl == rhs.subgroupSizeControl ) &&
             ( computeFullSubgroups == rhs.computeFullSubgroups );
#  endif
    }

    bool operator!=( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceSubgroupSizeControlFeatures exactly, in declaration order.
    StructureType sType                = StructureType::ePhysicalDeviceSubgroupSizeControlFeatures;
    void *        pNext                = {};
    Bool32        subgroupSizeControl  = {};
    Bool32        computeFullSubgroups = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API type to its C++ wrapper (specialization only available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceSubgroupSizeControlFeatures>
  {
    using Type = PhysicalDeviceSubgroupSizeControlFeatures;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupSizeControlFeatures>
  {
    using Type = PhysicalDeviceSubgroupSizeControlFeatures;
  };

  // Alias for the EXT-suffixed name of the same structure.
  using PhysicalDeviceSubgroupSizeControlFeaturesEXT = PhysicalDeviceSubgroupSizeControlFeatures;

  // wrapper struct for struct VkPhysicalDeviceSubgroupSizeControlProperties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSubgroupSizeControlProperties.html
  struct PhysicalDeviceSubgroupSizeControlProperties
  {
    using NativeType = VkPhysicalDeviceSubgroupSizeControlProperties;

    // Chain metadata: the sType this wrapper reports; NOTE(review): allowDuplicate is
    // consumed by StructureChain validation defined elsewhere in vulkan.hpp.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceSubgroupSizeControlProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; pNext comes last so the common case needs no arguments.
    // (Properties structs are output-only, hence no setters are generated below.)
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlProperties( uint32_t         minSubgroupSize_              = {},
                                                                      uint32_t         maxSubgroupSize_              = {},
                                                                      uint32_t         maxComputeWorkgroupSubgroups_ = {},
                                                                      ShaderStageFlags requiredSubgroupSizeStages_   = {},
                                                                      void *           pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , minSubgroupSize{ minSubgroupSize_ }
      , maxSubgroupSize{ maxSubgroupSize_ }
      , maxComputeWorkgroupSubgroups{ maxComputeWorkgroupSubgroups_ }
      , requiredSubgroupSizeStages{ requiredSubgroupSizeStages_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlProperties( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct; relies on layout compatibility with VkPhysicalDeviceSubgroupSizeControlProperties.
    PhysicalDeviceSubgroupSizeControlProperties( VkPhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSubgroupSizeControlProperties( *reinterpret_cast<PhysicalDeviceSubgroupSizeControlProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceSubgroupSizeControlProperties & operator=( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct; available even when constructors are compiled out.
    PhysicalDeviceSubgroupSizeControlProperties & operator=( VkPhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceSubgroupSizeControlProperties const *>( &rhs );
      return *this;
    }

    // Zero-cost views of this object as the corresponding C struct, by reference and by pointer.
    operator VkPhysicalDeviceSubgroupSizeControlProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlProperties *>( this );
    }

    operator VkPhysicalDeviceSubgroupSizeControlProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlProperties *>( this );
    }

    operator VkPhysicalDeviceSubgroupSizeControlProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlProperties *>( this );
    }

    operator VkPhysicalDeviceSubgroupSizeControlProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references over all members, in declaration order.
    std::tuple<StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, ShaderStageFlags const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, minSubgroupSize, maxSubgroupSize, maxComputeWorkgroupSubgroups, requiredSubgroupSizeStages );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSubgroupSizeControlProperties const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer value — the chain is not followed.
    bool operator==( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSubgroupSize == rhs.minSubgroupSize ) && ( maxSubgroupSize == rhs.maxSubgroupSize ) &&
             ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups ) && ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages );
#  endif
    }

    bool operator!=( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceSubgroupSizeControlProperties exactly, in declaration order.
    StructureType    sType                        = StructureType::ePhysicalDeviceSubgroupSizeControlProperties;
    void *           pNext                        = {};
    uint32_t         minSubgroupSize              = {};
    uint32_t         maxSubgroupSize              = {};
    uint32_t         maxComputeWorkgroupSubgroups = {};
    ShaderStageFlags requiredSubgroupSizeStages   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API type to its C++ wrapper (specialization only available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceSubgroupSizeControlProperties>
  {
    using Type = PhysicalDeviceSubgroupSizeControlProperties;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupSizeControlProperties>
  {
    using Type = PhysicalDeviceSubgroupSizeControlProperties;
  };

  // Alias for the EXT-suffixed name of the same structure.
  using PhysicalDeviceSubgroupSizeControlPropertiesEXT = PhysicalDeviceSubgroupSizeControlProperties;

  // wrapper struct for struct VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT.html
  struct PhysicalDeviceSubpassMergeFeedbackFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT;

    // Chain metadata: the sType this wrapper reports; NOTE(review): allowDuplicate is
    // consumed by StructureChain validation defined elsewhere in vulkan.hpp.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; pNext comes last so the common case needs no arguments.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( Bool32 subpassMergeFeedback_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , subpassMergeFeedback{ subpassMergeFeedback_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct; relies on layout compatibility with VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT.
    PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( *reinterpret_cast<PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & operator=( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct; available even when constructors are compiled out.
    PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & operator=( VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & setSubpassMergeFeedback( Bool32 subpassMergeFeedback_ ) VULKAN_HPP_NOEXCEPT
    {
      subpassMergeFeedback = subpassMergeFeedback_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost views of this object as the corresponding C struct, by reference and by pointer.
    operator VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references over all members, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, subpassMergeFeedback );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer value — the chain is not followed.
    bool operator==( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassMergeFeedback == rhs.subpassMergeFeedback );
#  endif
    }

    bool operator!=( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT exactly, in declaration order.
    StructureType sType                = StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
    void *        pNext                = {};
    Bool32        subpassMergeFeedback = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API type to its C++ wrapper (specialization only available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT>
  {
    using Type = PhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT>
  {
    using Type = PhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceSubpassShadingFeaturesHUAWEI, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSubpassShadingFeaturesHUAWEI.html
  struct PhysicalDeviceSubpassShadingFeaturesHUAWEI
  {
    using NativeType = VkPhysicalDeviceSubpassShadingFeaturesHUAWEI;

    // Chain metadata: the sType this wrapper reports; NOTE(review): allowDuplicate is
    // consumed by StructureChain validation defined elsewhere in vulkan.hpp.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; pNext comes last so the common case needs no arguments.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingFeaturesHUAWEI( Bool32 subpassShading_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , subpassShading{ subpassShading_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingFeaturesHUAWEI( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct; relies on layout compatibility with VkPhysicalDeviceSubpassShadingFeaturesHUAWEI.
    PhysicalDeviceSubpassShadingFeaturesHUAWEI( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSubpassShadingFeaturesHUAWEI( *reinterpret_cast<PhysicalDeviceSubpassShadingFeaturesHUAWEI const *>( &rhs ) )
    {
    }

    PhysicalDeviceSubpassShadingFeaturesHUAWEI & operator=( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct; available even when constructors are compiled out.
    PhysicalDeviceSubpassShadingFeaturesHUAWEI & operator=( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceSubpassShadingFeaturesHUAWEI const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassShadingFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassShadingFeaturesHUAWEI & setSubpassShading( Bool32 subpassShading_ ) VULKAN_HPP_NOEXCEPT
    {
      subpassShading = subpassShading_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost views of this object as the corresponding C struct, by reference and by pointer.
    operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSubpassShadingFeaturesHUAWEI *>( this );
    }

    operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSubpassShadingFeaturesHUAWEI *>( this );
    }

    operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceSubpassShadingFeaturesHUAWEI *>( this );
    }

    operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceSubpassShadingFeaturesHUAWEI *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references over all members, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, subpassShading );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer value — the chain is not followed.
    bool operator==( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassShading == rhs.subpassShading );
#  endif
    }

    bool operator!=( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceSubpassShadingFeaturesHUAWEI exactly, in declaration order.
    StructureType sType          = StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI;
    void *        pNext          = {};
    Bool32        subpassShading = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API type to its C++ wrapper (specialization only available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceSubpassShadingFeaturesHUAWEI>
  {
    using Type = PhysicalDeviceSubpassShadingFeaturesHUAWEI;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI>
  {
    using Type = PhysicalDeviceSubpassShadingFeaturesHUAWEI;
  };

  // wrapper struct for struct VkPhysicalDeviceSubpassShadingPropertiesHUAWEI, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSubpassShadingPropertiesHUAWEI.html
  struct PhysicalDeviceSubpassShadingPropertiesHUAWEI
  {
    using NativeType = VkPhysicalDeviceSubpassShadingPropertiesHUAWEI;

    // Chain metadata: the sType this wrapper reports; NOTE(review): allowDuplicate is
    // consumed by StructureChain validation defined elsewhere in vulkan.hpp.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; pNext comes last so the common case needs no arguments.
    // (Properties structs are output-only, hence no setters are generated below.)
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingPropertiesHUAWEI( uint32_t maxSubpassShadingWorkgroupSizeAspectRatio_ = {},
                                                                       void *   pNext_                                     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxSubpassShadingWorkgroupSizeAspectRatio{ maxSubpassShadingWorkgroupSizeAspectRatio_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingPropertiesHUAWEI( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct; relies on layout compatibility with VkPhysicalDeviceSubpassShadingPropertiesHUAWEI.
    PhysicalDeviceSubpassShadingPropertiesHUAWEI( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSubpassShadingPropertiesHUAWEI( *reinterpret_cast<PhysicalDeviceSubpassShadingPropertiesHUAWEI const *>( &rhs ) )
    {
    }

    PhysicalDeviceSubpassShadingPropertiesHUAWEI & operator=( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct; available even when constructors are compiled out.
    PhysicalDeviceSubpassShadingPropertiesHUAWEI & operator=( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceSubpassShadingPropertiesHUAWEI const *>( &rhs );
      return *this;
    }

    // Zero-cost views of this object as the corresponding C struct, by reference and by pointer.
    operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSubpassShadingPropertiesHUAWEI *>( this );
    }

    operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSubpassShadingPropertiesHUAWEI *>( this );
    }

    operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceSubpassShadingPropertiesHUAWEI *>( this );
    }

    operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceSubpassShadingPropertiesHUAWEI *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references over all members, in declaration order.
    std::tuple<StructureType const &, void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxSubpassShadingWorkgroupSizeAspectRatio );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer value — the chain is not followed.
    bool operator==( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSubpassShadingWorkgroupSizeAspectRatio == rhs.maxSubpassShadingWorkgroupSizeAspectRatio );
#  endif
    }

    bool operator!=( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceSubpassShadingPropertiesHUAWEI exactly, in declaration order.
    StructureType sType                                     = StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI;
    void *        pNext                                     = {};
    uint32_t      maxSubpassShadingWorkgroupSizeAspectRatio = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API type to its C++ wrapper (specialization only available from C++20 on).
  template <>
  struct CppType<VkPhysicalDeviceSubpassShadingPropertiesHUAWEI>
  {
    using Type = PhysicalDeviceSubpassShadingPropertiesHUAWEI;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI>
  {
    using Type = PhysicalDeviceSubpassShadingPropertiesHUAWEI;
  };

  // wrapper struct for struct VkPhysicalDeviceSurfaceInfo2KHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSurfaceInfo2KHR.html
  struct PhysicalDeviceSurfaceInfo2KHR
  {
    using NativeType = VkPhysicalDeviceSurfaceInfo2KHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceSurfaceInfo2KHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; pNext comes last so the common case needs no arguments.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( SurfaceKHR surface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , surface{ surface_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct; relies on layout compatibility with VkPhysicalDeviceSurfaceInfo2KHR.
    PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSurfaceInfo2KHR( *reinterpret_cast<PhysicalDeviceSurfaceInfo2KHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceSurfaceInfo2KHR & operator=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct; available even when constructors are compiled out.
    PhysicalDeviceSurfaceInfo2KHR & operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceSurfaceInfo2KHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR & setSurface( SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
    {
      surface = surface_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPhysicalDeviceSurfaceInfo2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( this );
    }

    operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSurfaceInfo2KHR *>( this );
    }

    operator VkPhysicalDeviceSurfaceInfo2KHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( this );
    }

    operator VkPhysicalDeviceSurfaceInfo2KHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceSurfaceInfo2KHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, SurfaceKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, surface );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSurfaceInfo2KHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surface == rhs.surface );
#  endif
    }

    bool operator!=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType   = StructureType::ePhysicalDeviceSurfaceInfo2KHR;
    const void *  pNext   = {};
    SurfaceKHR    surface = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Compile-time mapping from the native C type to its C++ wrapper type.
  template <>
  struct CppType<VkPhysicalDeviceSurfaceInfo2KHR>
  {
    using Type = PhysicalDeviceSurfaceInfo2KHR;
  };
#endif

  // Compile-time mapping from the StructureType enumerant to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSurfaceInfo2KHR>
  {
    using Type = PhysicalDeviceSurfaceInfo2KHR;
  };

  // wrapper struct for struct VkPhysicalDeviceSwapchainMaintenance1FeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSwapchainMaintenance1FeaturesKHR.html
  struct PhysicalDeviceSwapchainMaintenance1FeaturesKHR
  {
    using NativeType = VkPhysicalDeviceSwapchainMaintenance1FeaturesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceSwapchainMaintenance1FeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor (all members default to empty), plus copy and conversion-from-C-struct constructors.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSwapchainMaintenance1FeaturesKHR( Bool32 swapchainMaintenance1_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , swapchainMaintenance1{ swapchainMaintenance1_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceSwapchainMaintenance1FeaturesKHR( PhysicalDeviceSwapchainMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSwapchainMaintenance1FeaturesKHR( VkPhysicalDeviceSwapchainMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSwapchainMaintenance1FeaturesKHR( *reinterpret_cast<PhysicalDeviceSwapchainMaintenance1FeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceSwapchainMaintenance1FeaturesKHR & operator=( PhysicalDeviceSwapchainMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct; valid because wrapper and C struct are layout-identical.
    PhysicalDeviceSwapchainMaintenance1FeaturesKHR & operator=( VkPhysicalDeviceSwapchainMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceSwapchainMaintenance1FeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, returning *this to allow fluent initialization.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSwapchainMaintenance1FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSwapchainMaintenance1FeaturesKHR & setSwapchainMaintenance1( Bool32 swapchainMaintenance1_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainMaintenance1 = swapchainMaintenance1_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, implemented as reinterpret_casts of this object.
    operator VkPhysicalDeviceSwapchainMaintenance1FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSwapchainMaintenance1FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceSwapchainMaintenance1FeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSwapchainMaintenance1FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceSwapchainMaintenance1FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceSwapchainMaintenance1FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceSwapchainMaintenance1FeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceSwapchainMaintenance1FeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references (used by the comparison operators below).
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, swapchainMaintenance1 );
    }
#endif

    // Member-wise comparison; uses a defaulted <=> where the compiler supports it.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSwapchainMaintenance1FeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceSwapchainMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainMaintenance1 == rhs.swapchainMaintenance1 );
#  endif
    }

    bool operator!=( PhysicalDeviceSwapchainMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native C struct one-to-one; order and types must not change
    // (the reinterpret_casts above rely on identical layout).
    StructureType sType                 = StructureType::ePhysicalDeviceSwapchainMaintenance1FeaturesKHR;
    void *        pNext                 = {};
    Bool32        swapchainMaintenance1 = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Compile-time mapping from the native C type to its C++ wrapper type.
  template <>
  struct CppType<VkPhysicalDeviceSwapchainMaintenance1FeaturesKHR>
  {
    using Type = PhysicalDeviceSwapchainMaintenance1FeaturesKHR;
  };
#endif

  // Compile-time mapping from the StructureType enumerant to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSwapchainMaintenance1FeaturesKHR>
  {
    using Type = PhysicalDeviceSwapchainMaintenance1FeaturesKHR;
  };

  // Alias kept for source compatibility with the former EXT-suffixed name.
  using PhysicalDeviceSwapchainMaintenance1FeaturesEXT = PhysicalDeviceSwapchainMaintenance1FeaturesKHR;

  // wrapper struct for struct VkPhysicalDeviceSynchronization2Features, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSynchronization2Features.html
  struct PhysicalDeviceSynchronization2Features
  {
    using NativeType = VkPhysicalDeviceSynchronization2Features;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceSynchronization2Features;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor (all members default to empty), plus copy and conversion-from-C-struct constructors.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2Features( Bool32 synchronization2_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , synchronization2{ synchronization2_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2Features( PhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSynchronization2Features( VkPhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSynchronization2Features( *reinterpret_cast<PhysicalDeviceSynchronization2Features const *>( &rhs ) )
    {
    }

    PhysicalDeviceSynchronization2Features & operator=( PhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct; valid because wrapper and C struct are layout-identical.
    PhysicalDeviceSynchronization2Features & operator=( VkPhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceSynchronization2Features const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, returning *this to allow fluent initialization.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2Features & setSynchronization2( Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT
    {
      synchronization2 = synchronization2_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, implemented as reinterpret_casts of this object.
    operator VkPhysicalDeviceSynchronization2Features const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSynchronization2Features *>( this );
    }

    operator VkPhysicalDeviceSynchronization2Features &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSynchronization2Features *>( this );
    }

    operator VkPhysicalDeviceSynchronization2Features const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceSynchronization2Features *>( this );
    }

    operator VkPhysicalDeviceSynchronization2Features *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceSynchronization2Features *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references (used by the comparison operators below).
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, synchronization2 );
    }
#endif

    // Member-wise comparison; uses a defaulted <=> where the compiler supports it.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSynchronization2Features const & ) const = default;
#else
    bool operator==( PhysicalDeviceSynchronization2Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( synchronization2 == rhs.synchronization2 );
#  endif
    }

    bool operator!=( PhysicalDeviceSynchronization2Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native C struct one-to-one; order and types must not change
    // (the reinterpret_casts above rely on identical layout).
    StructureType sType            = StructureType::ePhysicalDeviceSynchronization2Features;
    void *        pNext            = {};
    Bool32        synchronization2 = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Compile-time mapping from the native C type to its C++ wrapper type.
  template <>
  struct CppType<VkPhysicalDeviceSynchronization2Features>
  {
    using Type = PhysicalDeviceSynchronization2Features;
  };
#endif

  // Compile-time mapping from the StructureType enumerant to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSynchronization2Features>
  {
    using Type = PhysicalDeviceSynchronization2Features;
  };

  // Alias kept for source compatibility with the KHR-suffixed name.
  using PhysicalDeviceSynchronization2FeaturesKHR = PhysicalDeviceSynchronization2Features;

  // wrapper struct for struct VkPhysicalDeviceTensorFeaturesARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTensorFeaturesARM.html
  struct PhysicalDeviceTensorFeaturesARM
  {
    using NativeType = VkPhysicalDeviceTensorFeaturesARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceTensorFeaturesARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor (all members default to empty), plus copy and conversion-from-C-struct constructors.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceTensorFeaturesARM( Bool32 tensorNonPacked_                               = {},
                                                          Bool32 shaderTensorAccess_                            = {},
                                                          Bool32 shaderStorageTensorArrayDynamicIndexing_       = {},
                                                          Bool32 shaderStorageTensorArrayNonUniformIndexing_    = {},
                                                          Bool32 descriptorBindingStorageTensorUpdateAfterBind_ = {},
                                                          Bool32 tensors_                                       = {},
                                                          void * pNext_                                         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , tensorNonPacked{ tensorNonPacked_ }
      , shaderTensorAccess{ shaderTensorAccess_ }
      , shaderStorageTensorArrayDynamicIndexing{ shaderStorageTensorArrayDynamicIndexing_ }
      , shaderStorageTensorArrayNonUniformIndexing{ shaderStorageTensorArrayNonUniformIndexing_ }
      , descriptorBindingStorageTensorUpdateAfterBind{ descriptorBindingStorageTensorUpdateAfterBind_ }
      , tensors{ tensors_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceTensorFeaturesARM( PhysicalDeviceTensorFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceTensorFeaturesARM( VkPhysicalDeviceTensorFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceTensorFeaturesARM( *reinterpret_cast<PhysicalDeviceTensorFeaturesARM const *>( &rhs ) )
    {
    }

    PhysicalDeviceTensorFeaturesARM & operator=( PhysicalDeviceTensorFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct; valid because wrapper and C struct are layout-identical.
    PhysicalDeviceTensorFeaturesARM & operator=( VkPhysicalDeviceTensorFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceTensorFeaturesARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, returning *this to allow fluent initialization.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTensorFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTensorFeaturesARM & setTensorNonPacked( Bool32 tensorNonPacked_ ) VULKAN_HPP_NOEXCEPT
    {
      tensorNonPacked = tensorNonPacked_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTensorFeaturesARM & setShaderTensorAccess( Bool32 shaderTensorAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderTensorAccess = shaderTensorAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTensorFeaturesARM &
      setShaderStorageTensorArrayDynamicIndexing( Bool32 shaderStorageTensorArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageTensorArrayDynamicIndexing = shaderStorageTensorArrayDynamicIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTensorFeaturesARM &
      setShaderStorageTensorArrayNonUniformIndexing( Bool32 shaderStorageTensorArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageTensorArrayNonUniformIndexing = shaderStorageTensorArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTensorFeaturesARM &
      setDescriptorBindingStorageTensorUpdateAfterBind( Bool32 descriptorBindingStorageTensorUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingStorageTensorUpdateAfterBind = descriptorBindingStorageTensorUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTensorFeaturesARM & setTensors( Bool32 tensors_ ) VULKAN_HPP_NOEXCEPT
    {
      tensors = tensors_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, implemented as reinterpret_casts of this object.
    operator VkPhysicalDeviceTensorFeaturesARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceTensorFeaturesARM *>( this );
    }

    operator VkPhysicalDeviceTensorFeaturesARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceTensorFeaturesARM *>( this );
    }

    operator VkPhysicalDeviceTensorFeaturesARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceTensorFeaturesARM *>( this );
    }

    operator VkPhysicalDeviceTensorFeaturesARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceTensorFeaturesARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references (used by the comparison operators below).
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       tensorNonPacked,
                       shaderTensorAccess,
                       shaderStorageTensorArrayDynamicIndexing,
                       shaderStorageTensorArrayNonUniformIndexing,
                       descriptorBindingStorageTensorUpdateAfterBind,
                       tensors );
    }
#endif

    // Member-wise comparison; uses a defaulted <=> where the compiler supports it.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceTensorFeaturesARM const & ) const = default;
#else
    bool operator==( PhysicalDeviceTensorFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tensorNonPacked == rhs.tensorNonPacked ) &&
             ( shaderTensorAccess == rhs.shaderTensorAccess ) && ( shaderStorageTensorArrayDynamicIndexing == rhs.shaderStorageTensorArrayDynamicIndexing ) &&
             ( shaderStorageTensorArrayNonUniformIndexing == rhs.shaderStorageTensorArrayNonUniformIndexing ) &&
             ( descriptorBindingStorageTensorUpdateAfterBind == rhs.descriptorBindingStorageTensorUpdateAfterBind ) && ( tensors == rhs.tensors );
#  endif
    }

    bool operator!=( PhysicalDeviceTensorFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native C struct one-to-one; order and types must not change
    // (the reinterpret_casts above rely on identical layout).
    StructureType sType                                         = StructureType::ePhysicalDeviceTensorFeaturesARM;
    void *        pNext                                         = {};
    Bool32        tensorNonPacked                               = {};
    Bool32        shaderTensorAccess                            = {};
    Bool32        shaderStorageTensorArrayDynamicIndexing       = {};
    Bool32        shaderStorageTensorArrayNonUniformIndexing    = {};
    Bool32        descriptorBindingStorageTensorUpdateAfterBind = {};
    Bool32        tensors                                       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Compile-time mapping from the native C type to its C++ wrapper type.
  template <>
  struct CppType<VkPhysicalDeviceTensorFeaturesARM>
  {
    using Type = PhysicalDeviceTensorFeaturesARM;
  };
#endif

  // Compile-time mapping from the StructureType enumerant to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTensorFeaturesARM>
  {
    using Type = PhysicalDeviceTensorFeaturesARM;
  };

  // wrapper struct for struct VkPhysicalDeviceTensorPropertiesARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTensorPropertiesARM.html
  struct PhysicalDeviceTensorPropertiesARM
  {
    using NativeType = VkPhysicalDeviceTensorPropertiesARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceTensorPropertiesARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor (all members default to empty), plus copy and conversion-from-C-struct constructors.
    // Note: this is a properties (output) struct, so no chainable setters are generated for it.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceTensorPropertiesARM( uint32_t         maxTensorDimensionCount_                            = {},
                                                            uint64_t         maxTensorElements_                                  = {},
                                                            uint64_t         maxPerDimensionTensorElements_                      = {},
                                                            int64_t          maxTensorStride_                                    = {},
                                                            uint64_t         maxTensorSize_                                      = {},
                                                            uint32_t         maxTensorShaderAccessArrayLength_                   = {},
                                                            uint32_t         maxTensorShaderAccessSize_                          = {},
                                                            uint32_t         maxDescriptorSetStorageTensors_                     = {},
                                                            uint32_t         maxPerStageDescriptorSetStorageTensors_             = {},
                                                            uint32_t         maxDescriptorSetUpdateAfterBindStorageTensors_      = {},
                                                            uint32_t         maxPerStageDescriptorUpdateAfterBindStorageTensors_ = {},
                                                            Bool32           shaderStorageTensorArrayNonUniformIndexingNative_   = {},
                                                            ShaderStageFlags shaderTensorSupportedStages_                        = {},
                                                            void *           pNext_                                              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxTensorDimensionCount{ maxTensorDimensionCount_ }
      , maxTensorElements{ maxTensorElements_ }
      , maxPerDimensionTensorElements{ maxPerDimensionTensorElements_ }
      , maxTensorStride{ maxTensorStride_ }
      , maxTensorSize{ maxTensorSize_ }
      , maxTensorShaderAccessArrayLength{ maxTensorShaderAccessArrayLength_ }
      , maxTensorShaderAccessSize{ maxTensorShaderAccessSize_ }
      , maxDescriptorSetStorageTensors{ maxDescriptorSetStorageTensors_ }
      , maxPerStageDescriptorSetStorageTensors{ maxPerStageDescriptorSetStorageTensors_ }
      , maxDescriptorSetUpdateAfterBindStorageTensors{ maxDescriptorSetUpdateAfterBindStorageTensors_ }
      , maxPerStageDescriptorUpdateAfterBindStorageTensors{ maxPerStageDescriptorUpdateAfterBindStorageTensors_ }
      , shaderStorageTensorArrayNonUniformIndexingNative{ shaderStorageTensorArrayNonUniformIndexingNative_ }
      , shaderTensorSupportedStages{ shaderTensorSupportedStages_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceTensorPropertiesARM( PhysicalDeviceTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceTensorPropertiesARM( VkPhysicalDeviceTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceTensorPropertiesARM( *reinterpret_cast<PhysicalDeviceTensorPropertiesARM const *>( &rhs ) )
    {
    }

    PhysicalDeviceTensorPropertiesARM & operator=( PhysicalDeviceTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct; valid because wrapper and C struct are layout-identical.
    PhysicalDeviceTensorPropertiesARM & operator=( VkPhysicalDeviceTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceTensorPropertiesARM const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type, implemented as reinterpret_casts of this object.
    operator VkPhysicalDeviceTensorPropertiesARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceTensorPropertiesARM *>( this );
    }

    operator VkPhysicalDeviceTensorPropertiesARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceTensorPropertiesARM *>( this );
    }

    operator VkPhysicalDeviceTensorPropertiesARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceTensorPropertiesARM *>( this );
    }

    operator VkPhysicalDeviceTensorPropertiesARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceTensorPropertiesARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references (used by the comparison operators below).
    std::tuple<StructureType const &,
               void * const &,
               uint32_t const &,
               uint64_t const &,
               uint64_t const &,
               int64_t const &,
               uint64_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               Bool32 const &,
               ShaderStageFlags const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       maxTensorDimensionCount,
                       maxTensorElements,
                       maxPerDimensionTensorElements,
                       maxTensorStride,
                       maxTensorSize,
                       maxTensorShaderAccessArrayLength,
                       maxTensorShaderAccessSize,
                       maxDescriptorSetStorageTensors,
                       maxPerStageDescriptorSetStorageTensors,
                       maxDescriptorSetUpdateAfterBindStorageTensors,
                       maxPerStageDescriptorUpdateAfterBindStorageTensors,
                       shaderStorageTensorArrayNonUniformIndexingNative,
                       shaderTensorSupportedStages );
    }
#endif

    // Member-wise comparison; uses a defaulted <=> where the compiler supports it.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceTensorPropertiesARM const & ) const = default;
#else
    bool operator==( PhysicalDeviceTensorPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxTensorDimensionCount == rhs.maxTensorDimensionCount ) &&
             ( maxTensorElements == rhs.maxTensorElements ) && ( maxPerDimensionTensorElements == rhs.maxPerDimensionTensorElements ) &&
             ( maxTensorStride == rhs.maxTensorStride ) && ( maxTensorSize == rhs.maxTensorSize ) &&
             ( maxTensorShaderAccessArrayLength == rhs.maxTensorShaderAccessArrayLength ) && ( maxTensorShaderAccessSize == rhs.maxTensorShaderAccessSize ) &&
             ( maxDescriptorSetStorageTensors == rhs.maxDescriptorSetStorageTensors ) &&
             ( maxPerStageDescriptorSetStorageTensors == rhs.maxPerStageDescriptorSetStorageTensors ) &&
             ( maxDescriptorSetUpdateAfterBindStorageTensors == rhs.maxDescriptorSetUpdateAfterBindStorageTensors ) &&
             ( maxPerStageDescriptorUpdateAfterBindStorageTensors == rhs.maxPerStageDescriptorUpdateAfterBindStorageTensors ) &&
             ( shaderStorageTensorArrayNonUniformIndexingNative == rhs.shaderStorageTensorArrayNonUniformIndexingNative ) &&
             ( shaderTensorSupportedStages == rhs.shaderTensorSupportedStages );
#  endif
    }

    bool operator!=( PhysicalDeviceTensorPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native C struct one-to-one; order and types must not change
    // (the reinterpret_casts above rely on identical layout).
    StructureType    sType                                              = StructureType::ePhysicalDeviceTensorPropertiesARM;
    void *           pNext                                              = {};
    uint32_t         maxTensorDimensionCount                            = {};
    uint64_t         maxTensorElements                                  = {};
    uint64_t         maxPerDimensionTensorElements                      = {};
    int64_t          maxTensorStride                                    = {};
    uint64_t         maxTensorSize                                      = {};
    uint32_t         maxTensorShaderAccessArrayLength                   = {};
    uint32_t         maxTensorShaderAccessSize                          = {};
    uint32_t         maxDescriptorSetStorageTensors                     = {};
    uint32_t         maxPerStageDescriptorSetStorageTensors             = {};
    uint32_t         maxDescriptorSetUpdateAfterBindStorageTensors      = {};
    uint32_t         maxPerStageDescriptorUpdateAfterBindStorageTensors = {};
    Bool32           shaderStorageTensorArrayNonUniformIndexingNative   = {};
    ShaderStageFlags shaderTensorSupportedStages                        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Compile-time mapping from the native C type to its C++ wrapper type.
  template <>
  struct CppType<VkPhysicalDeviceTensorPropertiesARM>
  {
    using Type = PhysicalDeviceTensorPropertiesARM;
  };
#endif

  // Compile-time mapping from the StructureType enumerant to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTensorPropertiesARM>
  {
    using Type = PhysicalDeviceTensorPropertiesARM;
  };

  // wrapper struct for struct VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT.html
  struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor (all members default to empty), plus copy and conversion-from-C-struct constructors.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( Bool32 texelBufferAlignment_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , texelBufferAlignment{ texelBufferAlignment_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceTexelBufferAlignmentFeaturesEXT( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceTexelBufferAlignmentFeaturesEXT( *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct; valid because wrapper and C struct are layout-identical.
    PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, returning *this to allow fluent initialization.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setTexelBufferAlignment( Bool32 texelBufferAlignment_ ) VULKAN_HPP_NOEXCEPT
    {
      texelBufferAlignment = texelBufferAlignment_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, implemented as reinterpret_casts of this object.
    operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references (used by the comparison operators below).
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, texelBufferAlignment );
    }
#endif

    // Member-wise comparison; uses a defaulted <=> where the compiler supports it.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( texelBufferAlignment == rhs.texelBufferAlignment );
#  endif
    }

    bool operator!=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native C struct one-to-one; order and types must not change
    // (the reinterpret_casts above rely on identical layout).
    StructureType sType                = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
    void *        pNext                = {};
    Bool32        texelBufferAlignment = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type back to its C++ wrapper (C++20 or newer only)
  template <>
  struct CppType<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>
  {
    using Type = PhysicalDeviceTexelBufferAlignmentFeaturesEXT;
  };
#endif

  // maps the StructureType enumerant back to its C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT>
  {
    using Type = PhysicalDeviceTexelBufferAlignmentFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceTexelBufferAlignmentProperties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTexelBufferAlignmentProperties.html
  struct PhysicalDeviceTexelBufferAlignmentProperties
  {
    // the corresponding C API struct; the reinterpret_cast conversions below assume
    // this wrapper is layout-identical to it
    using NativeType = VkPhysicalDeviceTexelBufferAlignmentProperties;

    // compile-time metadata: the StructureType enumerant stored in sType, and whether
    // this struct may appear more than once in a pNext chain (here: no)
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceTexelBufferAlignmentProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; every member defaults to zero-initialization, pNext to nullptr
    VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties( DeviceSize storageTexelBufferOffsetAlignmentBytes_       = {},
                                                                       Bool32     storageTexelBufferOffsetSingleTexelAlignment_ = {},
                                                                       DeviceSize uniformTexelBufferOffsetAlignmentBytes_       = {},
                                                                       Bool32     uniformTexelBufferOffsetSingleTexelAlignment_ = {},
                                                                       void *     pNext_                                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , storageTexelBufferOffsetAlignmentBytes{ storageTexelBufferOffsetAlignmentBytes_ }
      , storageTexelBufferOffsetSingleTexelAlignment{ storageTexelBufferOffsetSingleTexelAlignment_ }
      , uniformTexelBufferOffsetAlignmentBytes{ uniformTexelBufferOffsetAlignmentBytes_ }
      , uniformTexelBufferOffsetSingleTexelAlignment{ uniformTexelBufferOffsetSingleTexelAlignment_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct by reinterpreting it as the (layout-identical) wrapper
    PhysicalDeviceTexelBufferAlignmentProperties( VkPhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceTexelBufferAlignmentProperties( *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceTexelBufferAlignmentProperties & operator=( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct (available even when wrapper constructors are disabled)
    PhysicalDeviceTexelBufferAlignmentProperties & operator=( VkPhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentProperties const *>( &rhs );
      return *this;
    }

    // implicit conversions to the C struct: reference and pointer, const and non-const.
    // Note: no setters are generated for this struct — it is filled in by the implementation.
    operator VkPhysicalDeviceTexelBufferAlignmentProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentProperties *>( this );
    }

    operator VkPhysicalDeviceTexelBufferAlignmentProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentProperties *>( this );
    }

    operator VkPhysicalDeviceTexelBufferAlignmentProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentProperties *>( this );
    }

    operator VkPhysicalDeviceTexelBufferAlignmentProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple-of-references view over all members, used for comparison below
    std::tuple<StructureType const &, void * const &, DeviceSize const &, Bool32 const &, DeviceSize const &, Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       storageTexelBufferOffsetAlignmentBytes,
                       storageTexelBufferOffsetSingleTexelAlignment,
                       uniformTexelBufferOffsetAlignmentBytes,
                       uniformTexelBufferOffsetSingleTexelAlignment );
    }
#endif

    // defaulted three-way comparison where available; otherwise memberwise ==/!=
    // (pNext is compared as a raw pointer, not deep-compared)
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceTexelBufferAlignmentProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes ) &&
             ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment ) &&
             ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes ) &&
             ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment );
#  endif
    }

    bool operator!=( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members mirror the C struct exactly, in declaration order; sType is preset
    StructureType sType                                        = StructureType::ePhysicalDeviceTexelBufferAlignmentProperties;
    void *        pNext                                        = {};
    DeviceSize    storageTexelBufferOffsetAlignmentBytes       = {};
    Bool32        storageTexelBufferOffsetSingleTexelAlignment = {};
    DeviceSize    uniformTexelBufferOffsetAlignmentBytes       = {};
    Bool32        uniformTexelBufferOffsetSingleTexelAlignment = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type back to its C++ wrapper (C++20 or newer only)
  template <>
  struct CppType<VkPhysicalDeviceTexelBufferAlignmentProperties>
  {
    using Type = PhysicalDeviceTexelBufferAlignmentProperties;
  };
#endif

  // maps the StructureType enumerant back to its C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentProperties>
  {
    using Type = PhysicalDeviceTexelBufferAlignmentProperties;
  };

  // alias kept for backward compatibility with the original EXT-suffixed name
  using PhysicalDeviceTexelBufferAlignmentPropertiesEXT = PhysicalDeviceTexelBufferAlignmentProperties;

  // wrapper struct for struct VkPhysicalDeviceTextureCompressionASTCHDRFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTextureCompressionASTCHDRFeatures.html
  struct PhysicalDeviceTextureCompressionASTCHDRFeatures
  {
    // the corresponding C API struct; the reinterpret_cast conversions below assume
    // this wrapper is layout-identical to it
    using NativeType = VkPhysicalDeviceTextureCompressionASTCHDRFeatures;

    // compile-time metadata: the StructureType enumerant stored in sType, and whether
    // this struct may appear more than once in a pNext chain (here: no)
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; the feature flag defaults to zero-initialization, pNext to nullptr
    VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeatures( Bool32 textureCompressionASTC_HDR_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , textureCompressionASTC_HDR{ textureCompressionASTC_HDR_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceTextureCompressionASTCHDRFeatures( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct by reinterpreting it as the (layout-identical) wrapper
    PhysicalDeviceTextureCompressionASTCHDRFeatures( VkPhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceTextureCompressionASTCHDRFeatures( *reinterpret_cast<PhysicalDeviceTextureCompressionASTCHDRFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceTextureCompressionASTCHDRFeatures & operator=( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct (available even when wrapper constructors are disabled)
    PhysicalDeviceTextureCompressionASTCHDRFeatures & operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceTextureCompressionASTCHDRFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable fluent setters, one per mutable member
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeatures &
      setTextureCompressionASTC_HDR( Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT
    {
      textureCompressionASTC_HDR = textureCompressionASTC_HDR_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the C struct: reference and pointer, const and non-const
    operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceTextureCompressionASTCHDRFeatures *>( this );
    }

    operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceTextureCompressionASTCHDRFeatures *>( this );
    }

    operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceTextureCompressionASTCHDRFeatures *>( this );
    }

    operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceTextureCompressionASTCHDRFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple-of-references view over all members, used for comparison below
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, textureCompressionASTC_HDR );
    }
#endif

    // defaulted three-way comparison where available; otherwise memberwise ==/!=
    // (pNext is compared as a raw pointer, not deep-compared)
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceTextureCompressionASTCHDRFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR );
#  endif
    }

    bool operator!=( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members mirror the C struct exactly, in declaration order; sType is preset
    StructureType sType                      = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures;
    void *        pNext                      = {};
    Bool32        textureCompressionASTC_HDR = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type back to its C++ wrapper (C++20 or newer only)
  template <>
  struct CppType<VkPhysicalDeviceTextureCompressionASTCHDRFeatures>
  {
    using Type = PhysicalDeviceTextureCompressionASTCHDRFeatures;
  };
#endif

  // maps the StructureType enumerant back to its C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures>
  {
    using Type = PhysicalDeviceTextureCompressionASTCHDRFeatures;
  };

  // alias kept for backward compatibility with the original EXT-suffixed name
  using PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT = PhysicalDeviceTextureCompressionASTCHDRFeatures;

  // wrapper struct for struct VkPhysicalDeviceTileMemoryHeapFeaturesQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTileMemoryHeapFeaturesQCOM.html
  struct PhysicalDeviceTileMemoryHeapFeaturesQCOM
  {
    // the corresponding C API struct; the reinterpret_cast conversions below assume
    // this wrapper is layout-identical to it
    using NativeType = VkPhysicalDeviceTileMemoryHeapFeaturesQCOM;

    // compile-time metadata: the StructureType enumerant stored in sType, and whether
    // this struct may appear more than once in a pNext chain (here: no)
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceTileMemoryHeapFeaturesQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; the feature flag defaults to zero-initialization, pNext to nullptr
    VULKAN_HPP_CONSTEXPR PhysicalDeviceTileMemoryHeapFeaturesQCOM( Bool32 tileMemoryHeap_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , tileMemoryHeap{ tileMemoryHeap_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceTileMemoryHeapFeaturesQCOM( PhysicalDeviceTileMemoryHeapFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct by reinterpreting it as the (layout-identical) wrapper
    PhysicalDeviceTileMemoryHeapFeaturesQCOM( VkPhysicalDeviceTileMemoryHeapFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceTileMemoryHeapFeaturesQCOM( *reinterpret_cast<PhysicalDeviceTileMemoryHeapFeaturesQCOM const *>( &rhs ) )
    {
    }

    PhysicalDeviceTileMemoryHeapFeaturesQCOM & operator=( PhysicalDeviceTileMemoryHeapFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct (available even when wrapper constructors are disabled)
    PhysicalDeviceTileMemoryHeapFeaturesQCOM & operator=( VkPhysicalDeviceTileMemoryHeapFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceTileMemoryHeapFeaturesQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable fluent setters, one per mutable member
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileMemoryHeapFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileMemoryHeapFeaturesQCOM & setTileMemoryHeap( Bool32 tileMemoryHeap_ ) VULKAN_HPP_NOEXCEPT
    {
      tileMemoryHeap = tileMemoryHeap_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the C struct: reference and pointer, const and non-const
    operator VkPhysicalDeviceTileMemoryHeapFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceTileMemoryHeapFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceTileMemoryHeapFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceTileMemoryHeapFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceTileMemoryHeapFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceTileMemoryHeapFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceTileMemoryHeapFeaturesQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceTileMemoryHeapFeaturesQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple-of-references view over all members, used for comparison below
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, tileMemoryHeap );
    }
#endif

    // defaulted three-way comparison where available; otherwise memberwise ==/!=
    // (pNext is compared as a raw pointer, not deep-compared)
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceTileMemoryHeapFeaturesQCOM const & ) const = default;
#else
    bool operator==( PhysicalDeviceTileMemoryHeapFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tileMemoryHeap == rhs.tileMemoryHeap );
#  endif
    }

    bool operator!=( PhysicalDeviceTileMemoryHeapFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members mirror the C struct exactly, in declaration order; sType is preset
    StructureType sType          = StructureType::ePhysicalDeviceTileMemoryHeapFeaturesQCOM;
    void *        pNext          = {};
    Bool32        tileMemoryHeap = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type back to its C++ wrapper (C++20 or newer only)
  template <>
  struct CppType<VkPhysicalDeviceTileMemoryHeapFeaturesQCOM>
  {
    using Type = PhysicalDeviceTileMemoryHeapFeaturesQCOM;
  };
#endif

  // maps the StructureType enumerant back to its C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTileMemoryHeapFeaturesQCOM>
  {
    using Type = PhysicalDeviceTileMemoryHeapFeaturesQCOM;
  };

  // wrapper struct for struct VkPhysicalDeviceTileMemoryHeapPropertiesQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTileMemoryHeapPropertiesQCOM.html
  struct PhysicalDeviceTileMemoryHeapPropertiesQCOM
  {
    // the corresponding C API struct; the reinterpret_cast conversions below assume
    // this wrapper is layout-identical to it
    using NativeType = VkPhysicalDeviceTileMemoryHeapPropertiesQCOM;

    // compile-time metadata: the StructureType enumerant stored in sType, and whether
    // this struct may appear more than once in a pNext chain (here: no)
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceTileMemoryHeapPropertiesQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; every member defaults to zero-initialization, pNext to nullptr
    VULKAN_HPP_CONSTEXPR PhysicalDeviceTileMemoryHeapPropertiesQCOM( Bool32 queueSubmitBoundary_ = {},
                                                                     Bool32 tileBufferTransfers_ = {},
                                                                     void * pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , queueSubmitBoundary{ queueSubmitBoundary_ }
      , tileBufferTransfers{ tileBufferTransfers_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceTileMemoryHeapPropertiesQCOM( PhysicalDeviceTileMemoryHeapPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct by reinterpreting it as the (layout-identical) wrapper
    PhysicalDeviceTileMemoryHeapPropertiesQCOM( VkPhysicalDeviceTileMemoryHeapPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceTileMemoryHeapPropertiesQCOM( *reinterpret_cast<PhysicalDeviceTileMemoryHeapPropertiesQCOM const *>( &rhs ) )
    {
    }

    PhysicalDeviceTileMemoryHeapPropertiesQCOM & operator=( PhysicalDeviceTileMemoryHeapPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct (available even when wrapper constructors are disabled)
    PhysicalDeviceTileMemoryHeapPropertiesQCOM & operator=( VkPhysicalDeviceTileMemoryHeapPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceTileMemoryHeapPropertiesQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable fluent setters, one per mutable member
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileMemoryHeapPropertiesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileMemoryHeapPropertiesQCOM & setQueueSubmitBoundary( Bool32 queueSubmitBoundary_ ) VULKAN_HPP_NOEXCEPT
    {
      queueSubmitBoundary = queueSubmitBoundary_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileMemoryHeapPropertiesQCOM & setTileBufferTransfers( Bool32 tileBufferTransfers_ ) VULKAN_HPP_NOEXCEPT
    {
      tileBufferTransfers = tileBufferTransfers_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the C struct: reference and pointer, const and non-const
    operator VkPhysicalDeviceTileMemoryHeapPropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceTileMemoryHeapPropertiesQCOM *>( this );
    }

    operator VkPhysicalDeviceTileMemoryHeapPropertiesQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceTileMemoryHeapPropertiesQCOM *>( this );
    }

    operator VkPhysicalDeviceTileMemoryHeapPropertiesQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceTileMemoryHeapPropertiesQCOM *>( this );
    }

    operator VkPhysicalDeviceTileMemoryHeapPropertiesQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceTileMemoryHeapPropertiesQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple-of-references view over all members, used for comparison below
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, queueSubmitBoundary, tileBufferTransfers );
    }
#endif

    // defaulted three-way comparison where available; otherwise memberwise ==/!=
    // (pNext is compared as a raw pointer, not deep-compared)
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceTileMemoryHeapPropertiesQCOM const & ) const = default;
#else
    bool operator==( PhysicalDeviceTileMemoryHeapPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueSubmitBoundary == rhs.queueSubmitBoundary ) &&
             ( tileBufferTransfers == rhs.tileBufferTransfers );
#  endif
    }

    bool operator!=( PhysicalDeviceTileMemoryHeapPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members mirror the C struct exactly, in declaration order; sType is preset
    StructureType sType               = StructureType::ePhysicalDeviceTileMemoryHeapPropertiesQCOM;
    void *        pNext               = {};
    Bool32        queueSubmitBoundary = {};
    Bool32        tileBufferTransfers = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type back to its C++ wrapper (C++20 or newer only)
  template <>
  struct CppType<VkPhysicalDeviceTileMemoryHeapPropertiesQCOM>
  {
    using Type = PhysicalDeviceTileMemoryHeapPropertiesQCOM;
  };
#endif

  // maps the StructureType enumerant back to its C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTileMemoryHeapPropertiesQCOM>
  {
    using Type = PhysicalDeviceTileMemoryHeapPropertiesQCOM;
  };

  // wrapper struct for struct VkPhysicalDeviceTilePropertiesFeaturesQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTilePropertiesFeaturesQCOM.html
  struct PhysicalDeviceTilePropertiesFeaturesQCOM
  {
    // the corresponding C API struct; the reinterpret_cast conversions below assume
    // this wrapper is layout-identical to it
    using NativeType = VkPhysicalDeviceTilePropertiesFeaturesQCOM;

    // compile-time metadata: the StructureType enumerant stored in sType, and whether
    // this struct may appear more than once in a pNext chain (here: no)
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; the feature flag defaults to zero-initialization, pNext to nullptr
    VULKAN_HPP_CONSTEXPR PhysicalDeviceTilePropertiesFeaturesQCOM( Bool32 tileProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , tileProperties{ tileProperties_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceTilePropertiesFeaturesQCOM( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the C struct by reinterpreting it as the (layout-identical) wrapper
    PhysicalDeviceTilePropertiesFeaturesQCOM( VkPhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceTilePropertiesFeaturesQCOM( *reinterpret_cast<PhysicalDeviceTilePropertiesFeaturesQCOM const *>( &rhs ) )
    {
    }

    PhysicalDeviceTilePropertiesFeaturesQCOM & operator=( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the C struct (available even when wrapper constructors are disabled)
    PhysicalDeviceTilePropertiesFeaturesQCOM & operator=( VkPhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceTilePropertiesFeaturesQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable fluent setters, one per mutable member
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTilePropertiesFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTilePropertiesFeaturesQCOM & setTileProperties( Bool32 tileProperties_ ) VULKAN_HPP_NOEXCEPT
    {
      tileProperties = tileProperties_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the C struct: reference and pointer, const and non-const
    operator VkPhysicalDeviceTilePropertiesFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceTilePropertiesFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceTilePropertiesFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceTilePropertiesFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceTilePropertiesFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceTilePropertiesFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceTilePropertiesFeaturesQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceTilePropertiesFeaturesQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple-of-references view over all members, used for comparison below
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, tileProperties );
    }
#endif

    // defaulted three-way comparison where available; otherwise memberwise ==/!=
    // (pNext is compared as a raw pointer, not deep-compared)
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceTilePropertiesFeaturesQCOM const & ) const = default;
#else
    bool operator==( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tileProperties == rhs.tileProperties );
#  endif
    }

    bool operator!=( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members mirror the C struct exactly, in declaration order; sType is preset
    StructureType sType          = StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM;
    void *        pNext          = {};
    Bool32        tileProperties = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type back to its C++ wrapper (C++20 or newer only)
  template <>
  struct CppType<VkPhysicalDeviceTilePropertiesFeaturesQCOM>
  {
    using Type = PhysicalDeviceTilePropertiesFeaturesQCOM;
  };
#endif

  // maps the StructureType enumerant back to its C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM>
  {
    using Type = PhysicalDeviceTilePropertiesFeaturesQCOM;
  };

  // wrapper struct for struct VkPhysicalDeviceTileShadingFeaturesQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTileShadingFeaturesQCOM.html
  struct PhysicalDeviceTileShadingFeaturesQCOM
  {
    using NativeType = VkPhysicalDeviceTileShadingFeaturesQCOM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceTileShadingFeaturesQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceTileShadingFeaturesQCOM( Bool32 tileShading_                   = {},
                                                                Bool32 tileShadingFragmentStage_      = {},
                                                                Bool32 tileShadingColorAttachments_   = {},
                                                                Bool32 tileShadingDepthAttachments_   = {},
                                                                Bool32 tileShadingStencilAttachments_ = {},
                                                                Bool32 tileShadingInputAttachments_   = {},
                                                                Bool32 tileShadingSampledAttachments_ = {},
                                                                Bool32 tileShadingPerTileDraw_        = {},
                                                                Bool32 tileShadingPerTileDispatch_    = {},
                                                                Bool32 tileShadingDispatchTile_       = {},
                                                                Bool32 tileShadingApron_              = {},
                                                                Bool32 tileShadingAnisotropicApron_   = {},
                                                                Bool32 tileShadingAtomicOps_          = {},
                                                                Bool32 tileShadingImageProcessing_    = {},
                                                                void * pNext_                         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , tileShading{ tileShading_ }
      , tileShadingFragmentStage{ tileShadingFragmentStage_ }
      , tileShadingColorAttachments{ tileShadingColorAttachments_ }
      , tileShadingDepthAttachments{ tileShadingDepthAttachments_ }
      , tileShadingStencilAttachments{ tileShadingStencilAttachments_ }
      , tileShadingInputAttachments{ tileShadingInputAttachments_ }
      , tileShadingSampledAttachments{ tileShadingSampledAttachments_ }
      , tileShadingPerTileDraw{ tileShadingPerTileDraw_ }
      , tileShadingPerTileDispatch{ tileShadingPerTileDispatch_ }
      , tileShadingDispatchTile{ tileShadingDispatchTile_ }
      , tileShadingApron{ tileShadingApron_ }
      , tileShadingAnisotropicApron{ tileShadingAnisotropicApron_ }
      , tileShadingAtomicOps{ tileShadingAtomicOps_ }
      , tileShadingImageProcessing{ tileShadingImageProcessing_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceTileShadingFeaturesQCOM( PhysicalDeviceTileShadingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceTileShadingFeaturesQCOM( VkPhysicalDeviceTileShadingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceTileShadingFeaturesQCOM( *reinterpret_cast<PhysicalDeviceTileShadingFeaturesQCOM const *>( &rhs ) )
    {
    }

    PhysicalDeviceTileShadingFeaturesQCOM & operator=( PhysicalDeviceTileShadingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceTileShadingFeaturesQCOM & operator=( VkPhysicalDeviceTileShadingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceTileShadingFeaturesQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShading( Bool32 tileShading_ ) VULKAN_HPP_NOEXCEPT
    {
      tileShading = tileShading_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShadingFragmentStage( Bool32 tileShadingFragmentStage_ ) VULKAN_HPP_NOEXCEPT
    {
      tileShadingFragmentStage = tileShadingFragmentStage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShadingColorAttachments( Bool32 tileShadingColorAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      tileShadingColorAttachments = tileShadingColorAttachments_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShadingDepthAttachments( Bool32 tileShadingDepthAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      tileShadingDepthAttachments = tileShadingDepthAttachments_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM &
      setTileShadingStencilAttachments( Bool32 tileShadingStencilAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      tileShadingStencilAttachments = tileShadingStencilAttachments_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShadingInputAttachments( Bool32 tileShadingInputAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      tileShadingInputAttachments = tileShadingInputAttachments_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM &
      setTileShadingSampledAttachments( Bool32 tileShadingSampledAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      tileShadingSampledAttachments = tileShadingSampledAttachments_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShadingPerTileDraw( Bool32 tileShadingPerTileDraw_ ) VULKAN_HPP_NOEXCEPT
    {
      tileShadingPerTileDraw = tileShadingPerTileDraw_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShadingPerTileDispatch( Bool32 tileShadingPerTileDispatch_ ) VULKAN_HPP_NOEXCEPT
    {
      tileShadingPerTileDispatch = tileShadingPerTileDispatch_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShadingDispatchTile( Bool32 tileShadingDispatchTile_ ) VULKAN_HPP_NOEXCEPT
    {
      tileShadingDispatchTile = tileShadingDispatchTile_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShadingApron( Bool32 tileShadingApron_ ) VULKAN_HPP_NOEXCEPT
    {
      tileShadingApron = tileShadingApron_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShadingAnisotropicApron( Bool32 tileShadingAnisotropicApron_ ) VULKAN_HPP_NOEXCEPT
    {
      tileShadingAnisotropicApron = tileShadingAnisotropicApron_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShadingAtomicOps( Bool32 tileShadingAtomicOps_ ) VULKAN_HPP_NOEXCEPT
    {
      tileShadingAtomicOps = tileShadingAtomicOps_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShadingImageProcessing( Bool32 tileShadingImageProcessing_ ) VULKAN_HPP_NOEXCEPT
    {
      tileShadingImageProcessing = tileShadingImageProcessing_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPhysicalDeviceTileShadingFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceTileShadingFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceTileShadingFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceTileShadingFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceTileShadingFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceTileShadingFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceTileShadingFeaturesQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceTileShadingFeaturesQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &,
               void * const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       tileShading,
                       tileShadingFragmentStage,
                       tileShadingColorAttachments,
                       tileShadingDepthAttachments,
                       tileShadingStencilAttachments,
                       tileShadingInputAttachments,
                       tileShadingSampledAttachments,
                       tileShadingPerTileDraw,
                       tileShadingPerTileDispatch,
                       tileShadingDispatchTile,
                       tileShadingApron,
                       tileShadingAnisotropicApron,
                       tileShadingAtomicOps,
                       tileShadingImageProcessing );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceTileShadingFeaturesQCOM const & ) const = default;
#else
    bool operator==( PhysicalDeviceTileShadingFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tileShading == rhs.tileShading ) &&
             ( tileShadingFragmentStage == rhs.tileShadingFragmentStage ) && ( tileShadingColorAttachments == rhs.tileShadingColorAttachments ) &&
             ( tileShadingDepthAttachments == rhs.tileShadingDepthAttachments ) && ( tileShadingStencilAttachments == rhs.tileShadingStencilAttachments ) &&
             ( tileShadingInputAttachments == rhs.tileShadingInputAttachments ) && ( tileShadingSampledAttachments == rhs.tileShadingSampledAttachments ) &&
             ( tileShadingPerTileDraw == rhs.tileShadingPerTileDraw ) && ( tileShadingPerTileDispatch == rhs.tileShadingPerTileDispatch ) &&
             ( tileShadingDispatchTile == rhs.tileShadingDispatchTile ) && ( tileShadingApron == rhs.tileShadingApron ) &&
             ( tileShadingAnisotropicApron == rhs.tileShadingAnisotropicApron ) && ( tileShadingAtomicOps == rhs.tileShadingAtomicOps ) &&
             ( tileShadingImageProcessing == rhs.tileShadingImageProcessing );
#  endif
    }

    bool operator!=( PhysicalDeviceTileShadingFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                         = StructureType::ePhysicalDeviceTileShadingFeaturesQCOM;
    void *        pNext                         = {};
    Bool32        tileShading                   = {};
    Bool32        tileShadingFragmentStage      = {};
    Bool32        tileShadingColorAttachments   = {};
    Bool32        tileShadingDepthAttachments   = {};
    Bool32        tileShadingStencilAttachments = {};
    Bool32        tileShadingInputAttachments   = {};
    Bool32        tileShadingSampledAttachments = {};
    Bool32        tileShadingPerTileDraw        = {};
    Bool32        tileShadingPerTileDispatch    = {};
    Bool32        tileShadingDispatchTile       = {};
    Bool32        tileShadingApron              = {};
    Bool32        tileShadingAnisotropicApron   = {};
    Bool32        tileShadingAtomicOps          = {};
    Bool32        tileShadingImageProcessing    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper, so generic template code can
  // recover the wrapper type from the Vk type (C++20 only).
  template <>
  struct CppType<VkPhysicalDeviceTileShadingFeaturesQCOM>
  {
    using Type = PhysicalDeviceTileShadingFeaturesQCOM;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type that carries it as sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTileShadingFeaturesQCOM>
  {
    using Type = PhysicalDeviceTileShadingFeaturesQCOM;
  };

  // wrapper struct for struct VkPhysicalDeviceTileShadingPropertiesQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTileShadingPropertiesQCOM.html
  struct PhysicalDeviceTileShadingPropertiesQCOM
  {
    using NativeType = VkPhysicalDeviceTileShadingPropertiesQCOM;

    // allowDuplicate / structureType are consumed by the structure-chain machinery
    // defined elsewhere in vulkan.hpp.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceTileShadingPropertiesQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer below, pNext defaults to nullptr.
    // This is a properties struct (filled in by queries), so no per-member setters are generated.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceTileShadingPropertiesQCOM( uint32_t maxApronSize_       = {},
                                                                  Bool32   preferNonCoherent_  = {},
                                                                  Extent2D tileGranularity_    = {},
                                                                  Extent2D maxTileShadingRate_ = {},
                                                                  void *   pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxApronSize{ maxApronSize_ }
      , preferNonCoherent{ preferNonCoherent_ }
      , tileGranularity{ tileGranularity_ }
      , maxTileShadingRate{ maxTileShadingRate_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceTileShadingPropertiesQCOM( PhysicalDeviceTileShadingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-compatible with it.
    PhysicalDeviceTileShadingPropertiesQCOM( VkPhysicalDeviceTileShadingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceTileShadingPropertiesQCOM( *reinterpret_cast<PhysicalDeviceTileShadingPropertiesQCOM const *>( &rhs ) )
    {
    }

    PhysicalDeviceTileShadingPropertiesQCOM & operator=( PhysicalDeviceTileShadingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility argument as above).
    PhysicalDeviceTileShadingPropertiesQCOM & operator=( VkPhysicalDeviceTileShadingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceTileShadingPropertiesQCOM const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native type (by reference and by pointer); these rely on
    // the wrapper having exactly the same memory layout as the C struct.
    operator VkPhysicalDeviceTileShadingPropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceTileShadingPropertiesQCOM *>( this );
    }

    operator VkPhysicalDeviceTileShadingPropertiesQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceTileShadingPropertiesQCOM *>( this );
    }

    operator VkPhysicalDeviceTileShadingPropertiesQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceTileShadingPropertiesQCOM *>( this );
    }

    operator VkPhysicalDeviceTileShadingPropertiesQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceTileShadingPropertiesQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; used by operator== below when reflection is enabled.
    std::tuple<StructureType const &, void * const &, uint32_t const &, Bool32 const &, Extent2D const &, Extent2D const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxApronSize, preferNonCoherent, tileGranularity, maxTileShadingRate );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceTileShadingPropertiesQCOM const & ) const = default;
#else
    // Member-wise equality; note pNext is compared as a raw pointer, not deep-compared.
    bool operator==( PhysicalDeviceTileShadingPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxApronSize == rhs.maxApronSize ) && ( preferNonCoherent == rhs.preferNonCoherent ) &&
             ( tileGranularity == rhs.tileGranularity ) && ( maxTileShadingRate == rhs.maxTileShadingRate );
#  endif
    }

    bool operator!=( PhysicalDeviceTileShadingPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types must exactly mirror VkPhysicalDeviceTileShadingPropertiesQCOM
    // for the reinterpret_casts above to be valid.
    StructureType sType              = StructureType::ePhysicalDeviceTileShadingPropertiesQCOM;
    void *        pNext              = {};
    uint32_t      maxApronSize       = {};
    Bool32        preferNonCoherent  = {};
    Extent2D      tileGranularity    = {};
    Extent2D      maxTileShadingRate = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Native-to-wrapper type mapping for template code (C++20 only).
  template <>
  struct CppType<VkPhysicalDeviceTileShadingPropertiesQCOM>
  {
    using Type = PhysicalDeviceTileShadingPropertiesQCOM;
  };
#endif

  // StructureType-enumerant-to-wrapper mapping.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTileShadingPropertiesQCOM>
  {
    using Type = PhysicalDeviceTileShadingPropertiesQCOM;
  };

  // wrapper struct for struct VkPhysicalDeviceTimelineSemaphoreFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTimelineSemaphoreFeatures.html
  struct PhysicalDeviceTimelineSemaphoreFeatures
  {
    using NativeType = VkPhysicalDeviceTimelineSemaphoreFeatures;

    // allowDuplicate / structureType are consumed by the structure-chain machinery
    // defined elsewhere in vulkan.hpp.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer below, pNext defaults to nullptr.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( Bool32 timelineSemaphore_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , timelineSemaphore{ timelineSemaphore_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-compatible with it.
    PhysicalDeviceTimelineSemaphoreFeatures( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceTimelineSemaphoreFeatures( *reinterpret_cast<PhysicalDeviceTimelineSemaphoreFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceTimelineSemaphoreFeatures & operator=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility argument as above).
    PhysicalDeviceTimelineSemaphoreFeatures & operator=( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceTimelineSemaphoreFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures & setTimelineSemaphore( Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      timelineSemaphore = timelineSemaphore_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (by reference and by pointer); these rely on
    // the wrapper having exactly the same memory layout as the C struct.
    operator VkPhysicalDeviceTimelineSemaphoreFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreFeatures *>( this );
    }

    operator VkPhysicalDeviceTimelineSemaphoreFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreFeatures *>( this );
    }

    operator VkPhysicalDeviceTimelineSemaphoreFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreFeatures *>( this );
    }

    operator VkPhysicalDeviceTimelineSemaphoreFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; used by operator== below when reflection is enabled.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, timelineSemaphore );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceTimelineSemaphoreFeatures const & ) const = default;
#else
    // Member-wise equality; note pNext is compared as a raw pointer, not deep-compared.
    bool operator==( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timelineSemaphore == rhs.timelineSemaphore );
#  endif
    }

    bool operator!=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types must exactly mirror VkPhysicalDeviceTimelineSemaphoreFeatures
    // for the reinterpret_casts above to be valid.
    StructureType sType             = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures;
    void *        pNext             = {};
    Bool32        timelineSemaphore = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Native-to-wrapper type mapping for template code (C++20 only).
  template <>
  struct CppType<VkPhysicalDeviceTimelineSemaphoreFeatures>
  {
    using Type = PhysicalDeviceTimelineSemaphoreFeatures;
  };
#endif

  // StructureType-enumerant-to-wrapper mapping.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTimelineSemaphoreFeatures>
  {
    using Type = PhysicalDeviceTimelineSemaphoreFeatures;
  };

  // Alias kept so code written against the KHR extension name keeps compiling.
  using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures;

  // wrapper struct for struct VkPhysicalDeviceTimelineSemaphoreProperties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTimelineSemaphoreProperties.html
  struct PhysicalDeviceTimelineSemaphoreProperties
  {
    using NativeType = VkPhysicalDeviceTimelineSemaphoreProperties;

    // allowDuplicate / structureType are consumed by the structure-chain machinery
    // defined elsewhere in vulkan.hpp.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceTimelineSemaphoreProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer below, pNext defaults to nullptr.
    // This is a properties struct (filled in by queries), so no per-member setters are generated.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( uint64_t maxTimelineSemaphoreValueDifference_ = {},
                                                                    void *   pNext_                               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxTimelineSemaphoreValueDifference{ maxTimelineSemaphoreValueDifference_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-compatible with it.
    PhysicalDeviceTimelineSemaphoreProperties( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceTimelineSemaphoreProperties( *reinterpret_cast<PhysicalDeviceTimelineSemaphoreProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceTimelineSemaphoreProperties & operator=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility argument as above).
    PhysicalDeviceTimelineSemaphoreProperties & operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceTimelineSemaphoreProperties const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native type (by reference and by pointer); these rely on
    // the wrapper having exactly the same memory layout as the C struct.
    operator VkPhysicalDeviceTimelineSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreProperties *>( this );
    }

    operator VkPhysicalDeviceTimelineSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreProperties *>( this );
    }

    operator VkPhysicalDeviceTimelineSemaphoreProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreProperties *>( this );
    }

    operator VkPhysicalDeviceTimelineSemaphoreProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; used by operator== below when reflection is enabled.
    std::tuple<StructureType const &, void * const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxTimelineSemaphoreValueDifference );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceTimelineSemaphoreProperties const & ) const = default;
#else
    // Member-wise equality; note pNext is compared as a raw pointer, not deep-compared.
    bool operator==( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference );
#  endif
    }

    bool operator!=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types must exactly mirror VkPhysicalDeviceTimelineSemaphoreProperties
    // for the reinterpret_casts above to be valid.
    StructureType sType                               = StructureType::ePhysicalDeviceTimelineSemaphoreProperties;
    void *        pNext                               = {};
    uint64_t      maxTimelineSemaphoreValueDifference = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Native-to-wrapper type mapping for template code (C++20 only).
  template <>
  struct CppType<VkPhysicalDeviceTimelineSemaphoreProperties>
  {
    using Type = PhysicalDeviceTimelineSemaphoreProperties;
  };
#endif

  // StructureType-enumerant-to-wrapper mapping.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTimelineSemaphoreProperties>
  {
    using Type = PhysicalDeviceTimelineSemaphoreProperties;
  };

  // Alias kept so code written against the KHR extension name keeps compiling.
  using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties;

  // wrapper struct for struct VkPhysicalDeviceToolProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceToolProperties.html
  struct PhysicalDeviceToolProperties
  {
    using NativeType = VkPhysicalDeviceToolProperties;

    // allowDuplicate / structureType are consumed by the structure-chain machinery
    // defined elsewhere in vulkan.hpp.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceToolProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; takes std::array arguments for the fixed-size char fields and copies
    // them into the ArrayWrapper1D members. CONSTEXPR_14 (not CONSTEXPR) because the array
    // copy needs relaxed-constexpr support.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolProperties( std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & name_        = {},
                                                          std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & version_     = {},
                                                          ToolPurposeFlags                                     purposes_    = {},
                                                          std::array<char, VK_MAX_DESCRIPTION_SIZE> const &    description_ = {},
                                                          std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & layer_       = {},
                                                          void *                                               pNext_       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , name{ name_ }
      , version{ version_ }
      , purposes{ purposes_ }
      , description{ description_ }
      , layer{ layer_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolProperties( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-compatible with it.
    PhysicalDeviceToolProperties( VkPhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceToolProperties( *reinterpret_cast<PhysicalDeviceToolProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceToolProperties & operator=( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility argument as above).
    PhysicalDeviceToolProperties & operator=( VkPhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceToolProperties const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native type (by reference and by pointer); these rely on
    // the wrapper having exactly the same memory layout as the C struct.
    operator VkPhysicalDeviceToolProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceToolProperties *>( this );
    }

    operator VkPhysicalDeviceToolProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceToolProperties *>( this );
    }

    operator VkPhysicalDeviceToolProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceToolProperties *>( this );
    }

    operator VkPhysicalDeviceToolProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceToolProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references (reflection helper).
    std::tuple<StructureType const &,
               void * const &,
               ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &,
               ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &,
               ToolPurposeFlags const &,
               ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &,
               ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, name, version, purposes, description, layer );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written (not defaulted) so the fixed-size char-array members compare as
    // NUL-terminated strings via strcmp rather than element-by-element.
    std::strong_ordering operator<=>( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = strcmp( version, rhs.version ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = purposes <=> rhs.purposes; cmp != 0 )
        return cmp;
      if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = strcmp( layer, rhs.layer ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#endif

    // String members compare via strcmp (C-string semantics), other members value-wise;
    // pNext is compared as a raw pointer, not deep-compared.
    bool operator==( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( strcmp( name, rhs.name ) == 0 ) && ( strcmp( version, rhs.version ) == 0 ) &&
             ( purposes == rhs.purposes ) && ( strcmp( description, rhs.description ) == 0 ) && ( strcmp( layer, rhs.layer ) == 0 );
    }

    bool operator!=( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Member order and types must exactly mirror VkPhysicalDeviceToolProperties
    // for the reinterpret_casts above to be valid.
    StructureType                                    sType       = StructureType::ePhysicalDeviceToolProperties;
    void *                                           pNext       = {};
    ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> name        = {};
    ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> version     = {};
    ToolPurposeFlags                                 purposes    = {};
    ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE>    description = {};
    ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layer       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Native-to-wrapper type mapping for template code (C++20 only).
  template <>
  struct CppType<VkPhysicalDeviceToolProperties>
  {
    using Type = PhysicalDeviceToolProperties;
  };
#endif

  // StructureType-enumerant-to-wrapper mapping.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceToolProperties>
  {
    using Type = PhysicalDeviceToolProperties;
  };

  // Alias kept so code written against the EXT extension name keeps compiling.
  using PhysicalDeviceToolPropertiesEXT = PhysicalDeviceToolProperties;

  // wrapper struct for struct VkPhysicalDeviceTransformFeedbackFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTransformFeedbackFeaturesEXT.html
  struct PhysicalDeviceTransformFeedbackFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceTransformFeedbackFeaturesEXT;

    // allowDuplicate / structureType are consumed by the structure-chain machinery
    // defined elsewhere in vulkan.hpp.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer below, pNext defaults to nullptr.
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceTransformFeedbackFeaturesEXT( Bool32 transformFeedback_ = {}, Bool32 geometryStreams_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , transformFeedback{ transformFeedback_ }
      , geometryStreams{ geometryStreams_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper is layout-compatible with it.
    PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceTransformFeedbackFeaturesEXT( *reinterpret_cast<PhysicalDeviceTransformFeedbackFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility argument as above).
    PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceTransformFeedbackFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & setTransformFeedback( Bool32 transformFeedback_ ) VULKAN_HPP_NOEXCEPT
    {
      transformFeedback = transformFeedback_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & setGeometryStreams( Bool32 geometryStreams_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryStreams = geometryStreams_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (by reference and by pointer); these rely on
    // the wrapper having exactly the same memory layout as the C struct.
    operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceTransformFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceTransformFeedbackFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceTransformFeedbackFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceTransformFeedbackFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; used by operator== below when reflection is enabled.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, transformFeedback, geometryStreams );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceTransformFeedbackFeaturesEXT const & ) const = default;
#else
    // Member-wise equality; note pNext is compared as a raw pointer, not deep-compared.
    bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transformFeedback == rhs.transformFeedback ) && ( geometryStreams == rhs.geometryStreams );
#  endif
    }

    bool operator!=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types must exactly mirror VkPhysicalDeviceTransformFeedbackFeaturesEXT
    // for the reinterpret_casts above to be valid.
    StructureType sType             = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT;
    void *        pNext             = {};
    Bool32        transformFeedback = {};
    Bool32        geometryStreams   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Native-to-wrapper type mapping for template code (C++20 only).
  template <>
  struct CppType<VkPhysicalDeviceTransformFeedbackFeaturesEXT>
  {
    using Type = PhysicalDeviceTransformFeedbackFeaturesEXT;
  };
#endif

  // StructureType-enumerant-to-wrapper mapping.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT>
  {
    using Type = PhysicalDeviceTransformFeedbackFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceTransformFeedbackPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTransformFeedbackPropertiesEXT.html
  struct PhysicalDeviceTransformFeedbackPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceTransformFeedbackPropertiesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( uint32_t   maxTransformFeedbackStreams_                = {},
                                                                       uint32_t   maxTransformFeedbackBuffers_                = {},
                                                                       DeviceSize maxTransformFeedbackBufferSize_             = {},
                                                                       uint32_t   maxTransformFeedbackStreamDataSize_         = {},
                                                                       uint32_t   maxTransformFeedbackBufferDataSize_         = {},
                                                                       uint32_t   maxTransformFeedbackBufferDataStride_       = {},
                                                                       Bool32     transformFeedbackQueries_                   = {},
                                                                       Bool32     transformFeedbackStreamsLinesTriangles_     = {},
                                                                       Bool32     transformFeedbackRasterizationStreamSelect_ = {},
                                                                       Bool32     transformFeedbackDraw_                      = {},
                                                                       void *     pNext_                                      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxTransformFeedbackStreams{ maxTransformFeedbackStreams_ }
      , maxTransformFeedbackBuffers{ maxTransformFeedbackBuffers_ }
      , maxTransformFeedbackBufferSize{ maxTransformFeedbackBufferSize_ }
      , maxTransformFeedbackStreamDataSize{ maxTransformFeedbackStreamDataSize_ }
      , maxTransformFeedbackBufferDataSize{ maxTransformFeedbackBufferDataSize_ }
      , maxTransformFeedbackBufferDataStride{ maxTransformFeedbackBufferDataStride_ }
      , transformFeedbackQueries{ transformFeedbackQueries_ }
      , transformFeedbackStreamsLinesTriangles{ transformFeedbackStreamsLinesTriangles_ }
      , transformFeedbackRasterizationStreamSelect{ transformFeedbackRasterizationStreamSelect_ }
      , transformFeedbackDraw{ transformFeedbackDraw_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceTransformFeedbackPropertiesEXT( *reinterpret_cast<PhysicalDeviceTransformFeedbackPropertiesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceTransformFeedbackPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceTransformFeedbackPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceTransformFeedbackPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceTransformFeedbackPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceTransformFeedbackPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &,
               void * const &,
               uint32_t const &,
               uint32_t const &,
               DeviceSize const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       maxTransformFeedbackStreams,
                       maxTransformFeedbackBuffers,
                       maxTransformFeedbackBufferSize,
                       maxTransformFeedbackStreamDataSize,
                       maxTransformFeedbackBufferDataSize,
                       maxTransformFeedbackBufferDataStride,
                       transformFeedbackQueries,
                       transformFeedbackStreamsLinesTriangles,
                       transformFeedbackRasterizationStreamSelect,
                       transformFeedbackDraw );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceTransformFeedbackPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams ) &&
             ( maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers ) && ( maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize ) &&
             ( maxTransformFeedbackStreamDataSize == rhs.maxTransformFeedbackStreamDataSize ) &&
             ( maxTransformFeedbackBufferDataSize == rhs.maxTransformFeedbackBufferDataSize ) &&
             ( maxTransformFeedbackBufferDataStride == rhs.maxTransformFeedbackBufferDataStride ) &&
             ( transformFeedbackQueries == rhs.transformFeedbackQueries ) &&
             ( transformFeedbackStreamsLinesTriangles == rhs.transformFeedbackStreamsLinesTriangles ) &&
             ( transformFeedbackRasterizationStreamSelect == rhs.transformFeedbackRasterizationStreamSelect ) &&
             ( transformFeedbackDraw == rhs.transformFeedbackDraw );
#  endif
    }

    bool operator!=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                                      = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT;
    void *        pNext                                      = {};
    uint32_t      maxTransformFeedbackStreams                = {};
    uint32_t      maxTransformFeedbackBuffers                = {};
    DeviceSize    maxTransformFeedbackBufferSize             = {};
    uint32_t      maxTransformFeedbackStreamDataSize         = {};
    uint32_t      maxTransformFeedbackBufferDataSize         = {};
    uint32_t      maxTransformFeedbackBufferDataStride       = {};
    Bool32        transformFeedbackQueries                   = {};
    Bool32        transformFeedbackStreamsLinesTriangles     = {};
    Bool32        transformFeedbackRasterizationStreamSelect = {};
    Bool32        transformFeedbackDraw                      = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper for template-based lookup (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceTransformFeedbackPropertiesEXT>
  {
    using Type = PhysicalDeviceTransformFeedbackPropertiesEXT;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT>
  {
    using Type = PhysicalDeviceTransformFeedbackPropertiesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR.html
  struct PhysicalDeviceUnifiedImageLayoutsFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR;

    // sType value identifying this struct in a pNext chain; allowDuplicate is presumably
    // consumed by structure-chain machinery elsewhere (not visible in this chunk).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceUnifiedImageLayoutsFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; pNext comes last so the feature members can be set positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceUnifiedImageLayoutsFeaturesKHR( Bool32 unifiedImageLayouts_      = {},
                                                                       Bool32 unifiedImageLayoutsVideo_ = {},
                                                                       void * pNext_                    = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , unifiedImageLayouts{ unifiedImageLayouts_ }
      , unifiedImageLayoutsVideo{ unifiedImageLayoutsVideo_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceUnifiedImageLayoutsFeaturesKHR( PhysicalDeviceUnifiedImageLayoutsFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-compatible with it.
    PhysicalDeviceUnifiedImageLayoutsFeaturesKHR( VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceUnifiedImageLayoutsFeaturesKHR( *reinterpret_cast<PhysicalDeviceUnifiedImageLayoutsFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceUnifiedImageLayoutsFeaturesKHR & operator=( PhysicalDeviceUnifiedImageLayoutsFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct, delegating to the defaulted copy assignment.
    PhysicalDeviceUnifiedImageLayoutsFeaturesKHR & operator=( VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceUnifiedImageLayoutsFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUnifiedImageLayoutsFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUnifiedImageLayoutsFeaturesKHR & setUnifiedImageLayouts( Bool32 unifiedImageLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      unifiedImageLayouts = unifiedImageLayouts_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUnifiedImageLayoutsFeaturesKHR & setUnifiedImageLayoutsVideo( Bool32 unifiedImageLayoutsVideo_ ) VULKAN_HPP_NOEXCEPT
    {
      unifiedImageLayoutsVideo = unifiedImageLayoutsVideo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type, by reference and by pointer (const and non-const).
    operator VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used by the reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, unifiedImageLayouts, unifiedImageLayoutsVideo );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceUnifiedImageLayoutsFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceUnifiedImageLayoutsFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      // Member-wise comparison fallback when reflection is not enabled.
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( unifiedImageLayouts == rhs.unifiedImageLayouts ) &&
             ( unifiedImageLayoutsVideo == rhs.unifiedImageLayoutsVideo );
#  endif
    }

    bool operator!=( PhysicalDeviceUnifiedImageLayoutsFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native struct, in identical order.
    StructureType sType                    = StructureType::ePhysicalDeviceUnifiedImageLayoutsFeaturesKHR;
    void *        pNext                    = {};
    Bool32        unifiedImageLayouts      = {};
    Bool32        unifiedImageLayoutsVideo = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper for template-based lookup (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR>
  {
    using Type = PhysicalDeviceUnifiedImageLayoutsFeaturesKHR;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceUnifiedImageLayoutsFeaturesKHR>
  {
    using Type = PhysicalDeviceUnifiedImageLayoutsFeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceUniformBufferStandardLayoutFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceUniformBufferStandardLayoutFeatures.html
  struct PhysicalDeviceUniformBufferStandardLayoutFeatures
  {
    using NativeType = VkPhysicalDeviceUniformBufferStandardLayoutFeatures;

    // sType value identifying this struct in a pNext chain; allowDuplicate is presumably
    // consumed by structure-chain machinery elsewhere (not visible in this chunk).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; pNext comes last so the feature member can be set positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( Bool32 uniformBufferStandardLayout_ = {},
                                                                            void * pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , uniformBufferStandardLayout{ uniformBufferStandardLayout_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceUniformBufferStandardLayoutFeatures( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-compatible with it.
    PhysicalDeviceUniformBufferStandardLayoutFeatures( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceUniformBufferStandardLayoutFeatures( *reinterpret_cast<PhysicalDeviceUniformBufferStandardLayoutFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceUniformBufferStandardLayoutFeatures &
      operator=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct, delegating to the defaulted copy assignment.
    PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceUniformBufferStandardLayoutFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures &
      setUniformBufferStandardLayout( Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      uniformBufferStandardLayout = uniformBufferStandardLayout_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type, by reference and by pointer (const and non-const).
    operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceUniformBufferStandardLayoutFeatures *>( this );
    }

    operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceUniformBufferStandardLayoutFeatures *>( this );
    }

    operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceUniformBufferStandardLayoutFeatures *>( this );
    }

    operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceUniformBufferStandardLayoutFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used by the reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, uniformBufferStandardLayout );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceUniformBufferStandardLayoutFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      // Member-wise comparison fallback when reflection is not enabled.
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout );
#  endif
    }

    bool operator!=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native struct, in identical order.
    StructureType sType                       = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures;
    void *        pNext                       = {};
    Bool32        uniformBufferStandardLayout = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper for template-based lookup (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceUniformBufferStandardLayoutFeatures>
  {
    using Type = PhysicalDeviceUniformBufferStandardLayoutFeatures;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures>
  {
    using Type = PhysicalDeviceUniformBufferStandardLayoutFeatures;
  };

  // Backward-compatible alias for the pre-promotion KHR extension name.
  using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures;

  // wrapper struct for struct VkPhysicalDeviceVariablePointersFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVariablePointersFeatures.html
  struct PhysicalDeviceVariablePointersFeatures
  {
    using NativeType = VkPhysicalDeviceVariablePointersFeatures;

    // sType value identifying this struct in a pNext chain; allowDuplicate is presumably
    // consumed by structure-chain machinery elsewhere (not visible in this chunk).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVariablePointersFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; pNext comes last so the feature members can be set positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( Bool32 variablePointersStorageBuffer_ = {},
                                                                 Bool32 variablePointers_              = {},
                                                                 void * pNext_                         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , variablePointersStorageBuffer{ variablePointersStorageBuffer_ }
      , variablePointers{ variablePointers_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-compatible with it.
    PhysicalDeviceVariablePointersFeatures( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVariablePointersFeatures( *reinterpret_cast<PhysicalDeviceVariablePointersFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceVariablePointersFeatures & operator=( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct, delegating to the defaulted copy assignment.
    PhysicalDeviceVariablePointersFeatures & operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceVariablePointersFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures &
      setVariablePointersStorageBuffer( Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      variablePointersStorageBuffer = variablePointersStorageBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setVariablePointers( Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
    {
      variablePointers = variablePointers_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type, by reference and by pointer (const and non-const).
    operator VkPhysicalDeviceVariablePointersFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVariablePointersFeatures *>( this );
    }

    operator VkPhysicalDeviceVariablePointersFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVariablePointersFeatures *>( this );
    }

    operator VkPhysicalDeviceVariablePointersFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceVariablePointersFeatures *>( this );
    }

    operator VkPhysicalDeviceVariablePointersFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVariablePointersFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used by the reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, variablePointersStorageBuffer, variablePointers );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVariablePointersFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      // Member-wise comparison fallback when reflection is not enabled.
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) &&
             ( variablePointers == rhs.variablePointers );
#  endif
    }

    bool operator!=( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native struct, in identical order.
    StructureType sType                         = StructureType::ePhysicalDeviceVariablePointersFeatures;
    void *        pNext                         = {};
    Bool32        variablePointersStorageBuffer = {};
    Bool32        variablePointers              = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper for template-based lookup (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceVariablePointersFeatures>
  {
    using Type = PhysicalDeviceVariablePointersFeatures;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVariablePointersFeatures>
  {
    using Type = PhysicalDeviceVariablePointersFeatures;
  };

  // Backward-compatible aliases for the older singular spelling and the pre-promotion KHR names.
  using PhysicalDeviceVariablePointerFeatures     = PhysicalDeviceVariablePointersFeatures;
  using PhysicalDeviceVariablePointerFeaturesKHR  = PhysicalDeviceVariablePointersFeatures;
  using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures;

  // wrapper struct for struct VkPhysicalDeviceVertexAttributeDivisorFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVertexAttributeDivisorFeatures.html
  struct PhysicalDeviceVertexAttributeDivisorFeatures
  {
    using NativeType = VkPhysicalDeviceVertexAttributeDivisorFeatures;

    // sType value identifying this struct in a pNext chain; allowDuplicate is presumably
    // consumed by structure-chain machinery elsewhere (not visible in this chunk).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVertexAttributeDivisorFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; pNext comes last so the feature members can be set positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeatures( Bool32 vertexAttributeInstanceRateDivisor_     = {},
                                                                       Bool32 vertexAttributeInstanceRateZeroDivisor_ = {},
                                                                       void * pNext_                                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , vertexAttributeInstanceRateDivisor{ vertexAttributeInstanceRateDivisor_ }
      , vertexAttributeInstanceRateZeroDivisor{ vertexAttributeInstanceRateZeroDivisor_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeatures( PhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-compatible with it.
    PhysicalDeviceVertexAttributeDivisorFeatures( VkPhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVertexAttributeDivisorFeatures( *reinterpret_cast<PhysicalDeviceVertexAttributeDivisorFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceVertexAttributeDivisorFeatures & operator=( PhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct, delegating to the defaulted copy assignment.
    PhysicalDeviceVertexAttributeDivisorFeatures & operator=( VkPhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceVertexAttributeDivisorFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeatures &
      setVertexAttributeInstanceRateDivisor( Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeatures &
      setVertexAttributeInstanceRateZeroDivisor( Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type, by reference and by pointer (const and non-const).
    operator VkPhysicalDeviceVertexAttributeDivisorFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorFeatures *>( this );
    }

    operator VkPhysicalDeviceVertexAttributeDivisorFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorFeatures *>( this );
    }

    operator VkPhysicalDeviceVertexAttributeDivisorFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorFeatures *>( this );
    }

    operator VkPhysicalDeviceVertexAttributeDivisorFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used by the reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, vertexAttributeInstanceRateDivisor, vertexAttributeInstanceRateZeroDivisor );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVertexAttributeDivisorFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      // Member-wise comparison fallback when reflection is not enabled.
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor ) &&
             ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor );
#  endif
    }

    bool operator!=( PhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native struct, in identical order.
    StructureType sType                                  = StructureType::ePhysicalDeviceVertexAttributeDivisorFeatures;
    void *        pNext                                  = {};
    Bool32        vertexAttributeInstanceRateDivisor     = {};
    Bool32        vertexAttributeInstanceRateZeroDivisor = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper for template-based lookup (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceVertexAttributeDivisorFeatures>
  {
    using Type = PhysicalDeviceVertexAttributeDivisorFeatures;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorFeatures>
  {
    using Type = PhysicalDeviceVertexAttributeDivisorFeatures;
  };

  // Backward-compatible aliases for the pre-promotion EXT and KHR extension names.
  using PhysicalDeviceVertexAttributeDivisorFeaturesEXT = PhysicalDeviceVertexAttributeDivisorFeatures;
  using PhysicalDeviceVertexAttributeDivisorFeaturesKHR = PhysicalDeviceVertexAttributeDivisorFeatures;

  // wrapper struct for struct VkPhysicalDeviceVertexAttributeDivisorProperties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVertexAttributeDivisorProperties.html
  struct PhysicalDeviceVertexAttributeDivisorProperties
  {
    using NativeType = VkPhysicalDeviceVertexAttributeDivisorProperties;

    // sType value identifying this struct in a pNext chain; allowDuplicate is presumably
    // consumed by structure-chain machinery elsewhere (not visible in this chunk).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVertexAttributeDivisorProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; pNext comes last so the property members can be set positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorProperties( uint32_t maxVertexAttribDivisor_       = {},
                                                                         Bool32   supportsNonZeroFirstInstance_ = {},
                                                                         void *   pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxVertexAttribDivisor{ maxVertexAttribDivisor_ }
      , supportsNonZeroFirstInstance{ supportsNonZeroFirstInstance_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceVertexAttributeDivisorProperties( PhysicalDeviceVertexAttributeDivisorProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-compatible with it.
    PhysicalDeviceVertexAttributeDivisorProperties( VkPhysicalDeviceVertexAttributeDivisorProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVertexAttributeDivisorProperties( *reinterpret_cast<PhysicalDeviceVertexAttributeDivisorProperties const *>( &rhs ) )
    {
    }

    PhysicalDeviceVertexAttributeDivisorProperties & operator=( PhysicalDeviceVertexAttributeDivisorProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct, delegating to the defaulted copy assignment.
    PhysicalDeviceVertexAttributeDivisorProperties & operator=( VkPhysicalDeviceVertexAttributeDivisorProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceVertexAttributeDivisorProperties const *>( &rhs );
      return *this;
    }

    // No setter methods are generated for this struct (unlike the feature structs above).
    // Implicit conversions to the native type, by reference and by pointer (const and non-const).
    operator VkPhysicalDeviceVertexAttributeDivisorProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorProperties *>( this );
    }

    operator VkPhysicalDeviceVertexAttributeDivisorProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorProperties *>( this );
    }

    operator VkPhysicalDeviceVertexAttributeDivisorProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorProperties *>( this );
    }

    operator VkPhysicalDeviceVertexAttributeDivisorProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used by the reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, uint32_t const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxVertexAttribDivisor, supportsNonZeroFirstInstance );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVertexAttributeDivisorProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceVertexAttributeDivisorProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      // Member-wise comparison fallback when reflection is not enabled.
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor ) &&
             ( supportsNonZeroFirstInstance == rhs.supportsNonZeroFirstInstance );
#  endif
    }

    bool operator!=( PhysicalDeviceVertexAttributeDivisorProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native struct, in identical order.
    StructureType sType                        = StructureType::ePhysicalDeviceVertexAttributeDivisorProperties;
    void *        pNext                        = {};
    uint32_t      maxVertexAttribDivisor       = {};
    Bool32        supportsNonZeroFirstInstance = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper for template-based lookup (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceVertexAttributeDivisorProperties>
  {
    using Type = PhysicalDeviceVertexAttributeDivisorProperties;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorProperties>
  {
    using Type = PhysicalDeviceVertexAttributeDivisorProperties;
  };

  // Backward-compatible alias for the pre-promotion KHR extension name.
  using PhysicalDeviceVertexAttributeDivisorPropertiesKHR = PhysicalDeviceVertexAttributeDivisorProperties;

  // C++ wrapper around VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT; layout-compatible with the C struct, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT.html
  struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Field constructor; sType is fixed by its member initializer and never passed in.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxVertexAttribDivisor{ maxVertexAttribDivisor_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; both types share one layout, so this is a plain member-wise copy.
    PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVertexAttributeDivisorPropertiesEXT( reinterpret_cast<PhysicalDeviceVertexAttributeDivisorPropertiesEXT const &>( rhs ) )
    {
    }

    PhysicalDeviceVertexAttributeDivisorPropertiesEXT &
      operator=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the layout-compatible reinterpretation.
    PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = reinterpret_cast<PhysicalDeviceVertexAttributeDivisorPropertiesEXT const &>( rhs );
      return *this;
    }

    // Implicit conversions to the C struct, by reference and by pointer, const and non-const.
    operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const &>( *this );
    }

    operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &>( *this );
    }

    operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>( this );
    }

    operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used for generic member-wise comparison.
    std::tuple<StructureType const &, void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxVertexAttribDivisor );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & ) const = default;
#else
    // Member-wise equality (shallow pNext comparison).
    bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor );
#  endif
    }

    bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType                  = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
    void *        pNext                  = {};
    uint32_t      maxVertexAttribDivisor = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT>
  {
    using Type = PhysicalDeviceVertexAttributeDivisorPropertiesEXT;
  };
#endif

  // Maps the StructureType enumerator to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT>
  {
    using Type = PhysicalDeviceVertexAttributeDivisorPropertiesEXT;
  };

  // C++ wrapper around VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT; layout-compatible with the C struct, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT.html
  struct PhysicalDeviceVertexAttributeRobustnessFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVertexAttributeRobustnessFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Field constructor; sType is fixed by its member initializer and never passed in.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeRobustnessFeaturesEXT( Bool32 vertexAttributeRobustness_ = {},
                                                                             void * pNext_                     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , vertexAttributeRobustness{ vertexAttributeRobustness_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeRobustnessFeaturesEXT( PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; both types share one layout, so this is a plain member-wise copy.
    PhysicalDeviceVertexAttributeRobustnessFeaturesEXT( VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVertexAttributeRobustnessFeaturesEXT( reinterpret_cast<PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const &>( rhs ) )
    {
    }

    PhysicalDeviceVertexAttributeRobustnessFeaturesEXT &
      operator=( PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the layout-compatible reinterpretation.
    PhysicalDeviceVertexAttributeRobustnessFeaturesEXT & operator=( VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = reinterpret_cast<PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const &>( rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters, returning *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeRobustnessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeRobustnessFeaturesEXT &
      setVertexAttributeRobustness( Bool32 vertexAttributeRobustness_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexAttributeRobustness = vertexAttributeRobustness_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer, const and non-const.
    operator VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT const &>( *this );
    }

    operator VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT &>( *this );
    }

    operator VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT const *>( this );
    }

    operator VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used for generic member-wise comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, vertexAttributeRobustness );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & ) const = default;
#else
    // Member-wise equality (shallow pNext comparison).
    bool operator==( PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexAttributeRobustness == rhs.vertexAttributeRobustness );
#  endif
    }

    bool operator!=( PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType                     = StructureType::ePhysicalDeviceVertexAttributeRobustnessFeaturesEXT;
    void *        pNext                     = {};
    Bool32        vertexAttributeRobustness = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT>
  {
    using Type = PhysicalDeviceVertexAttributeRobustnessFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerator to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeRobustnessFeaturesEXT>
  {
    using Type = PhysicalDeviceVertexAttributeRobustnessFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT.html
  struct PhysicalDeviceVertexInputDynamicStateFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT( Bool32 vertexInputDynamicState_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , vertexInputDynamicState{ vertexInputDynamicState_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceVertexInputDynamicStateFeaturesEXT( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVertexInputDynamicStateFeaturesEXT( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVertexInputDynamicStateFeaturesEXT( *reinterpret_cast<PhysicalDeviceVertexInputDynamicStateFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceVertexInputDynamicStateFeaturesEXT & operator=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceVertexInputDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceVertexInputDynamicStateFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT & setVertexInputDynamicState( Bool32 vertexInputDynamicState_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexInputDynamicState = vertexInputDynamicState_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, vertexInputDynamicState );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexInputDynamicState == rhs.vertexInputDynamicState );
#  endif
    }

    bool operator!=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                   = StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT;
    void *        pNext                   = {};
    Bool32        vertexInputDynamicState = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT>
  {
    using Type = PhysicalDeviceVertexInputDynamicStateFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerator to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT>
  {
    using Type = PhysicalDeviceVertexInputDynamicStateFeaturesEXT;
  };

  // C++ wrapper around VkPhysicalDeviceVideoDecodeVP9FeaturesKHR; layout-compatible with the C struct, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVideoDecodeVP9FeaturesKHR.html
  struct PhysicalDeviceVideoDecodeVP9FeaturesKHR
  {
    using NativeType = VkPhysicalDeviceVideoDecodeVP9FeaturesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVideoDecodeVp9FeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Field constructor; sType is fixed by its member initializer and never passed in.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoDecodeVP9FeaturesKHR( Bool32 videoDecodeVP9_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , videoDecodeVP9{ videoDecodeVP9_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoDecodeVP9FeaturesKHR( PhysicalDeviceVideoDecodeVP9FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; both types share one layout, so this is a plain member-wise copy.
    PhysicalDeviceVideoDecodeVP9FeaturesKHR( VkPhysicalDeviceVideoDecodeVP9FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVideoDecodeVP9FeaturesKHR( reinterpret_cast<PhysicalDeviceVideoDecodeVP9FeaturesKHR const &>( rhs ) )
    {
    }

    PhysicalDeviceVideoDecodeVP9FeaturesKHR & operator=( PhysicalDeviceVideoDecodeVP9FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the layout-compatible reinterpretation.
    PhysicalDeviceVideoDecodeVP9FeaturesKHR & operator=( VkPhysicalDeviceVideoDecodeVP9FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = reinterpret_cast<PhysicalDeviceVideoDecodeVP9FeaturesKHR const &>( rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters, returning *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoDecodeVP9FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoDecodeVP9FeaturesKHR & setVideoDecodeVP9( Bool32 videoDecodeVP9_ ) VULKAN_HPP_NOEXCEPT
    {
      videoDecodeVP9 = videoDecodeVP9_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer, const and non-const.
    operator VkPhysicalDeviceVideoDecodeVP9FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVideoDecodeVP9FeaturesKHR const &>( *this );
    }

    operator VkPhysicalDeviceVideoDecodeVP9FeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVideoDecodeVP9FeaturesKHR &>( *this );
    }

    operator VkPhysicalDeviceVideoDecodeVP9FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVideoDecodeVP9FeaturesKHR const *>( this );
    }

    operator VkPhysicalDeviceVideoDecodeVP9FeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVideoDecodeVP9FeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used for generic member-wise comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, videoDecodeVP9 );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVideoDecodeVP9FeaturesKHR const & ) const = default;
#else
    // Member-wise equality (shallow pNext comparison).
    bool operator==( PhysicalDeviceVideoDecodeVP9FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoDecodeVP9 == rhs.videoDecodeVP9 );
#  endif
    }

    bool operator!=( PhysicalDeviceVideoDecodeVP9FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType          = StructureType::ePhysicalDeviceVideoDecodeVp9FeaturesKHR;
    void *        pNext          = {};
    Bool32        videoDecodeVP9 = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type VkPhysicalDeviceVideoDecodeVP9FeaturesKHR to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceVideoDecodeVP9FeaturesKHR>
  {
    using Type = PhysicalDeviceVideoDecodeVP9FeaturesKHR;
  };
#endif

  // Maps the StructureType enumerator to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVideoDecodeVp9FeaturesKHR>
  {
    using Type = PhysicalDeviceVideoDecodeVP9FeaturesKHR;
  };

  // C++ wrapper around VkPhysicalDeviceVideoEncodeAV1FeaturesKHR; layout-compatible with the C struct, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVideoEncodeAV1FeaturesKHR.html
  struct PhysicalDeviceVideoEncodeAV1FeaturesKHR
  {
    using NativeType = VkPhysicalDeviceVideoEncodeAV1FeaturesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVideoEncodeAv1FeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Field constructor; sType is fixed by its member initializer and never passed in.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeAV1FeaturesKHR( Bool32 videoEncodeAV1_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , videoEncodeAV1{ videoEncodeAV1_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeAV1FeaturesKHR( PhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; both types share one layout, so this is a plain member-wise copy.
    PhysicalDeviceVideoEncodeAV1FeaturesKHR( VkPhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVideoEncodeAV1FeaturesKHR( reinterpret_cast<PhysicalDeviceVideoEncodeAV1FeaturesKHR const &>( rhs ) )
    {
    }

    PhysicalDeviceVideoEncodeAV1FeaturesKHR & operator=( PhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the layout-compatible reinterpretation.
    PhysicalDeviceVideoEncodeAV1FeaturesKHR & operator=( VkPhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = reinterpret_cast<PhysicalDeviceVideoEncodeAV1FeaturesKHR const &>( rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters, returning *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeAV1FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeAV1FeaturesKHR & setVideoEncodeAV1( Bool32 videoEncodeAV1_ ) VULKAN_HPP_NOEXCEPT
    {
      videoEncodeAV1 = videoEncodeAV1_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer, const and non-const.
    operator VkPhysicalDeviceVideoEncodeAV1FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVideoEncodeAV1FeaturesKHR const &>( *this );
    }

    operator VkPhysicalDeviceVideoEncodeAV1FeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVideoEncodeAV1FeaturesKHR &>( *this );
    }

    operator VkPhysicalDeviceVideoEncodeAV1FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVideoEncodeAV1FeaturesKHR const *>( this );
    }

    operator VkPhysicalDeviceVideoEncodeAV1FeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVideoEncodeAV1FeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used for generic member-wise comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, videoEncodeAV1 );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVideoEncodeAV1FeaturesKHR const & ) const = default;
#else
    // Member-wise equality (shallow pNext comparison).
    bool operator==( PhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoEncodeAV1 == rhs.videoEncodeAV1 );
#  endif
    }

    bool operator!=( PhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType          = StructureType::ePhysicalDeviceVideoEncodeAv1FeaturesKHR;
    void *        pNext          = {};
    Bool32        videoEncodeAV1 = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type VkPhysicalDeviceVideoEncodeAV1FeaturesKHR to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceVideoEncodeAV1FeaturesKHR>
  {
    using Type = PhysicalDeviceVideoEncodeAV1FeaturesKHR;
  };
#endif

  // Maps the StructureType enumerator to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVideoEncodeAv1FeaturesKHR>
  {
    using Type = PhysicalDeviceVideoEncodeAV1FeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR.html
  struct PhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR( Bool32 videoEncodeIntraRefresh_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , videoEncodeIntraRefresh{ videoEncodeIntraRefresh_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR( PhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR( VkPhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR( *reinterpret_cast<PhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR & operator=( PhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR & operator=( VkPhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR & setVideoEncodeIntraRefresh( Bool32 videoEncodeIntraRefresh_ ) VULKAN_HPP_NOEXCEPT
    {
      videoEncodeIntraRefresh = videoEncodeIntraRefresh_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkPhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, videoEncodeIntraRefresh );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoEncodeIntraRefresh == rhs.videoEncodeIntraRefresh );
#  endif
    }

    bool operator!=( PhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                   = StructureType::ePhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR;
    void *        pNext                   = {};
    Bool32        videoEncodeIntraRefresh = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type VkPhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkPhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR>
  {
    using Type = PhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR;
  };
#endif

  // Maps the StructureType enumerator to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR>
  {
    using Type = PhysicalDeviceVideoEncodeIntraRefreshFeaturesKHR;
  };

  // C++ wrapper around VkVideoProfileInfoKHR; layout-compatible with the C struct, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoProfileInfoKHR.html
  struct VideoProfileInfoKHR
  {
    using NativeType = VkVideoProfileInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoProfileInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Field constructor; sType is fixed by its member initializer and never passed in.
    VULKAN_HPP_CONSTEXPR VideoProfileInfoKHR( VideoCodecOperationFlagBitsKHR videoCodecOperation_ = VideoCodecOperationFlagBitsKHR::eNone,
                                              VideoChromaSubsamplingFlagsKHR chromaSubsampling_   = {},
                                              VideoComponentBitDepthFlagsKHR lumaBitDepth_        = {},
                                              VideoComponentBitDepthFlagsKHR chromaBitDepth_      = {},
                                              const void *                   pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , videoCodecOperation{ videoCodecOperation_ }
      , chromaSubsampling{ chromaSubsampling_ }
      , lumaBitDepth{ lumaBitDepth_ }
      , chromaBitDepth{ chromaBitDepth_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoProfileInfoKHR( VideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; both types share one layout, so this is a plain member-wise copy.
    VideoProfileInfoKHR( VkVideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : VideoProfileInfoKHR( reinterpret_cast<VideoProfileInfoKHR const &>( rhs ) )
    {
    }

    VideoProfileInfoKHR & operator=( VideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the layout-compatible reinterpretation.
    VideoProfileInfoKHR & operator=( VkVideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = reinterpret_cast<VideoProfileInfoKHR const &>( rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters, returning *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setVideoCodecOperation( VideoCodecOperationFlagBitsKHR videoCodecOperation_ ) VULKAN_HPP_NOEXCEPT
    {
      videoCodecOperation = videoCodecOperation_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setChromaSubsampling( VideoChromaSubsamplingFlagsKHR chromaSubsampling_ ) VULKAN_HPP_NOEXCEPT
    {
      chromaSubsampling = chromaSubsampling_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setLumaBitDepth( VideoComponentBitDepthFlagsKHR lumaBitDepth_ ) VULKAN_HPP_NOEXCEPT
    {
      lumaBitDepth = lumaBitDepth_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setChromaBitDepth( VideoComponentBitDepthFlagsKHR chromaBitDepth_ ) VULKAN_HPP_NOEXCEPT
    {
      chromaBitDepth = chromaBitDepth_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer, const and non-const.
    operator VkVideoProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoProfileInfoKHR const &>( *this );
    }

    operator VkVideoProfileInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoProfileInfoKHR &>( *this );
    }

    operator VkVideoProfileInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoProfileInfoKHR const *>( this );
    }

    operator VkVideoProfileInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoProfileInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple view over all members, used for generic member-wise comparison.
    std::tuple<StructureType const &,
               const void * const &,
               VideoCodecOperationFlagBitsKHR const &,
               VideoChromaSubsamplingFlagsKHR const &,
               VideoComponentBitDepthFlagsKHR const &,
               VideoComponentBitDepthFlagsKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, videoCodecOperation, chromaSubsampling, lumaBitDepth, chromaBitDepth );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoProfileInfoKHR const & ) const = default;
#else
    // Member-wise equality (shallow pNext comparison).
    bool operator==( VideoProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoCodecOperation == rhs.videoCodecOperation ) &&
             ( chromaSubsampling == rhs.chromaSubsampling ) && ( lumaBitDepth == rhs.lumaBitDepth ) && ( chromaBitDepth == rhs.chromaBitDepth );
#  endif
    }

    bool operator!=( VideoProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType                  sType               = StructureType::eVideoProfileInfoKHR;
    const void *                   pNext               = {};
    VideoCodecOperationFlagBitsKHR videoCodecOperation = VideoCodecOperationFlagBitsKHR::eNone;
    VideoChromaSubsamplingFlagsKHR chromaSubsampling   = {};
    VideoComponentBitDepthFlagsKHR lumaBitDepth        = {};
    VideoComponentBitDepthFlagsKHR chromaBitDepth      = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkVideoProfileInfoKHR to its C++ wrapper; generic code recovers the wrapper via CppType<...>::Type.
  template <>
  struct CppType<VkVideoProfileInfoKHR>
  {
    using Type = VideoProfileInfoKHR;
  };
#endif

  // Maps the sType enumerant back to the wrapper type (presumably used by pNext-chain utilities elsewhere in vulkan.hpp).
  template <>
  struct CppType<StructureType, StructureType::eVideoProfileInfoKHR>
  {
    using Type = VideoProfileInfoKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR.html
  struct PhysicalDeviceVideoEncodeQualityLevelInfoKHR
  {
    // The C struct this wrapper mirrors; the reinterpret_cast conversions below rely on identical layout.
    using NativeType = VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR;

    // NOTE(review): allowDuplicate / structureType are presumably consumed by the StructureChain /
    // CppType machinery defined elsewhere in vulkan.hpp — not visible in this chunk.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVideoEncodeQualityLevelInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext comes last so the payload members can be passed positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeQualityLevelInfoKHR( const VideoProfileInfoKHR * pVideoProfile_ = {},
                                                                       uint32_t                    qualityLevel_  = {},
                                                                       const void *                pNext_         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pVideoProfile{ pVideoProfile_ }
      , qualityLevel{ qualityLevel_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeQualityLevelInfoKHR( PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the layout-identical wrapper.
    PhysicalDeviceVideoEncodeQualityLevelInfoKHR( VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVideoEncodeQualityLevelInfoKHR( *reinterpret_cast<PhysicalDeviceVideoEncodeQualityLevelInfoKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceVideoEncodeQualityLevelInfoKHR & operator=( PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout assumption as the converting constructor above).
    PhysicalDeviceVideoEncodeQualityLevelInfoKHR & operator=( VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceVideoEncodeQualityLevelInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeQualityLevelInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeQualityLevelInfoKHR & setPVideoProfile( const VideoProfileInfoKHR * pVideoProfile_ ) VULKAN_HPP_NOEXCEPT
    {
      pVideoProfile = pVideoProfile_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeQualityLevelInfoKHR & setQualityLevel( uint32_t qualityLevel_ ) VULKAN_HPP_NOEXCEPT
    {
      qualityLevel = qualityLevel_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the C struct (reference and pointer, const and non-const),
    // so the wrapper can be handed directly to the C API.
    operator VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( this );
    }

    operator VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( this );
    }

    operator VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( this );
    }

    operator VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, used below for member-wise comparison.
    std::tuple<StructureType const &, const void * const &, const VideoProfileInfoKHR * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pVideoProfile, qualityLevel );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & ) const = default;
#else
    // Member-wise equality; note pNext and pVideoProfile are compared as pointers (shallow), not by pointee.
    bool operator==( PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pVideoProfile == rhs.pVideoProfile ) && ( qualityLevel == rhs.qualityLevel );
#  endif
    }

    bool operator!=( PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members match the C struct in declaration order; sType identifies this struct in a pNext chain.
    StructureType               sType         = StructureType::ePhysicalDeviceVideoEncodeQualityLevelInfoKHR;
    const void *                pNext         = {};
    const VideoProfileInfoKHR * pVideoProfile = {};
    uint32_t                    qualityLevel  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR to its C++ wrapper via CppType<...>::Type.
  template <>
  struct CppType<VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR>
  {
    using Type = PhysicalDeviceVideoEncodeQualityLevelInfoKHR;
  };
#endif

  // Maps the sType enumerant back to the wrapper type (presumably used by pNext-chain utilities elsewhere in vulkan.hpp).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVideoEncodeQualityLevelInfoKHR>
  {
    using Type = PhysicalDeviceVideoEncodeQualityLevelInfoKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR.html
  struct PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR
  {
    // The C struct this wrapper mirrors; the reinterpret_cast conversions below rely on identical layout.
    using NativeType = VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR;

    // NOTE(review): allowDuplicate / structureType are presumably consumed by the StructureChain /
    // CppType machinery defined elsewhere in vulkan.hpp — not visible in this chunk.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext comes last so the feature flag can be passed positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR( Bool32 videoEncodeQuantizationMap_ = {},
                                                                              void * pNext_                      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , videoEncodeQuantizationMap{ videoEncodeQuantizationMap_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR( PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the layout-identical wrapper.
    PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR( VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR( *reinterpret_cast<PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR &
      operator=( PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout assumption as the converting constructor above).
    PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR & operator=( VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR &
      setVideoEncodeQuantizationMap( Bool32 videoEncodeQuantizationMap_ ) VULKAN_HPP_NOEXCEPT
    {
      videoEncodeQuantizationMap = videoEncodeQuantizationMap_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the C struct (reference and pointer, const and non-const),
    // so the wrapper can be handed directly to the C API.
    operator VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, used below for member-wise comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, videoEncodeQuantizationMap );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer (shallow).
    bool operator==( PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoEncodeQuantizationMap == rhs.videoEncodeQuantizationMap );
#  endif
    }

    bool operator!=( PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members match the C struct in declaration order; sType identifies this struct in a pNext chain.
    StructureType sType                      = StructureType::ePhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR;
    void *        pNext                      = {};
    Bool32        videoEncodeQuantizationMap = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR to its C++ wrapper via CppType<...>::Type.
  template <>
  struct CppType<VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR>
  {
    using Type = PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR;
  };
#endif

  // Maps the sType enumerant back to the wrapper type (presumably used by pNext-chain utilities elsewhere in vulkan.hpp).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR>
  {
    using Type = PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE.html
  struct PhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE
  {
    // The C struct this wrapper mirrors; the reinterpret_cast conversions below rely on identical layout.
    using NativeType = VkPhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE;

    // NOTE(review): allowDuplicate / structureType are presumably consumed by the StructureChain /
    // CppType machinery defined elsewhere in vulkan.hpp — not visible in this chunk.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext comes last so the feature flag can be passed positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE( Bool32 videoEncodeRgbConversion_ = {},
                                                                              void * pNext_                    = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , videoEncodeRgbConversion{ videoEncodeRgbConversion_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE( PhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the layout-identical wrapper.
    PhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE( VkPhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE( *reinterpret_cast<PhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE const *>( &rhs ) )
    {
    }

    PhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE &
      operator=( PhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout assumption as the converting constructor above).
    PhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE & operator=( VkPhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE &
      setVideoEncodeRgbConversion( Bool32 videoEncodeRgbConversion_ ) VULKAN_HPP_NOEXCEPT
    {
      videoEncodeRgbConversion = videoEncodeRgbConversion_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the C struct (reference and pointer, const and non-const),
    // so the wrapper can be handed directly to the C API.
    operator VkPhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE *>( this );
    }

    operator VkPhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE *>( this );
    }

    operator VkPhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE *>( this );
    }

    operator VkPhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, used below for member-wise comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, videoEncodeRgbConversion );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer (shallow).
    bool operator==( PhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoEncodeRgbConversion == rhs.videoEncodeRgbConversion );
#  endif
    }

    bool operator!=( PhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members match the C struct in declaration order; sType identifies this struct in a pNext chain.
    StructureType sType                    = StructureType::ePhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE;
    void *        pNext                    = {};
    Bool32        videoEncodeRgbConversion = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkPhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE to its C++ wrapper via CppType<...>::Type.
  template <>
  struct CppType<VkPhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE>
  {
    using Type = PhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE;
  };
#endif

  // Maps the sType enumerant back to the wrapper type (presumably used by pNext-chain utilities elsewhere in vulkan.hpp).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE>
  {
    using Type = PhysicalDeviceVideoEncodeRgbConversionFeaturesVALVE;
  };

  // wrapper struct for struct VkPhysicalDeviceVideoFormatInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVideoFormatInfoKHR.html
  struct PhysicalDeviceVideoFormatInfoKHR
  {
    // The C struct this wrapper mirrors; the reinterpret_cast conversions below rely on identical layout.
    using NativeType = VkPhysicalDeviceVideoFormatInfoKHR;

    // NOTE(review): allowDuplicate / structureType are presumably consumed by the StructureChain /
    // CppType machinery defined elsewhere in vulkan.hpp — not visible in this chunk.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVideoFormatInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext comes last so the usage flags can be passed positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR( ImageUsageFlags imageUsage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , imageUsage{ imageUsage_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the layout-identical wrapper.
    PhysicalDeviceVideoFormatInfoKHR( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVideoFormatInfoKHR( *reinterpret_cast<PhysicalDeviceVideoFormatInfoKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceVideoFormatInfoKHR & operator=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout assumption as the converting constructor above).
    PhysicalDeviceVideoFormatInfoKHR & operator=( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceVideoFormatInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoFormatInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoFormatInfoKHR & setImageUsage( ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT
    {
      imageUsage = imageUsage_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the C struct (reference and pointer, const and non-const),
    // so the wrapper can be handed directly to the C API.
    operator VkPhysicalDeviceVideoFormatInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( this );
    }

    operator VkPhysicalDeviceVideoFormatInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVideoFormatInfoKHR *>( this );
    }

    operator VkPhysicalDeviceVideoFormatInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( this );
    }

    operator VkPhysicalDeviceVideoFormatInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVideoFormatInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, used below for member-wise comparison.
    std::tuple<StructureType const &, const void * const &, ImageUsageFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, imageUsage );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVideoFormatInfoKHR const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer (shallow).
    bool operator==( PhysicalDeviceVideoFormatInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageUsage == rhs.imageUsage );
#  endif
    }

    bool operator!=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members match the C struct in declaration order; sType identifies this struct in a pNext chain.
    StructureType   sType      = StructureType::ePhysicalDeviceVideoFormatInfoKHR;
    const void *    pNext      = {};
    ImageUsageFlags imageUsage = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkPhysicalDeviceVideoFormatInfoKHR to its C++ wrapper via CppType<...>::Type.
  template <>
  struct CppType<VkPhysicalDeviceVideoFormatInfoKHR>
  {
    using Type = PhysicalDeviceVideoFormatInfoKHR;
  };
#endif

  // Maps the sType enumerant back to the wrapper type (presumably used by pNext-chain utilities elsewhere in vulkan.hpp).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVideoFormatInfoKHR>
  {
    using Type = PhysicalDeviceVideoFormatInfoKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceVideoMaintenance1FeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVideoMaintenance1FeaturesKHR.html
  struct PhysicalDeviceVideoMaintenance1FeaturesKHR
  {
    // The C struct this wrapper mirrors; the reinterpret_cast conversions below rely on identical layout.
    using NativeType = VkPhysicalDeviceVideoMaintenance1FeaturesKHR;

    // NOTE(review): allowDuplicate / structureType are presumably consumed by the StructureChain /
    // CppType machinery defined elsewhere in vulkan.hpp — not visible in this chunk.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVideoMaintenance1FeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext comes last so the feature flag can be passed positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoMaintenance1FeaturesKHR( Bool32 videoMaintenance1_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , videoMaintenance1{ videoMaintenance1_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoMaintenance1FeaturesKHR( PhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the layout-identical wrapper.
    PhysicalDeviceVideoMaintenance1FeaturesKHR( VkPhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVideoMaintenance1FeaturesKHR( *reinterpret_cast<PhysicalDeviceVideoMaintenance1FeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceVideoMaintenance1FeaturesKHR & operator=( PhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout assumption as the converting constructor above).
    PhysicalDeviceVideoMaintenance1FeaturesKHR & operator=( VkPhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceVideoMaintenance1FeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoMaintenance1FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoMaintenance1FeaturesKHR & setVideoMaintenance1( Bool32 videoMaintenance1_ ) VULKAN_HPP_NOEXCEPT
    {
      videoMaintenance1 = videoMaintenance1_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the C struct (reference and pointer, const and non-const),
    // so the wrapper can be handed directly to the C API.
    operator VkPhysicalDeviceVideoMaintenance1FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVideoMaintenance1FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceVideoMaintenance1FeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVideoMaintenance1FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceVideoMaintenance1FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceVideoMaintenance1FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceVideoMaintenance1FeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVideoMaintenance1FeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, used below for member-wise comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, videoMaintenance1 );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVideoMaintenance1FeaturesKHR const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer (shallow).
    bool operator==( PhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoMaintenance1 == rhs.videoMaintenance1 );
#  endif
    }

    bool operator!=( PhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members match the C struct in declaration order; sType identifies this struct in a pNext chain.
    StructureType sType             = StructureType::ePhysicalDeviceVideoMaintenance1FeaturesKHR;
    void *        pNext             = {};
    Bool32        videoMaintenance1 = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkPhysicalDeviceVideoMaintenance1FeaturesKHR to its C++ wrapper via CppType<...>::Type.
  template <>
  struct CppType<VkPhysicalDeviceVideoMaintenance1FeaturesKHR>
  {
    using Type = PhysicalDeviceVideoMaintenance1FeaturesKHR;
  };
#endif

  // Maps the sType enumerant back to the wrapper type (presumably used by pNext-chain utilities elsewhere in vulkan.hpp).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVideoMaintenance1FeaturesKHR>
  {
    using Type = PhysicalDeviceVideoMaintenance1FeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceVideoMaintenance2FeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVideoMaintenance2FeaturesKHR.html
  struct PhysicalDeviceVideoMaintenance2FeaturesKHR
  {
    // The C struct this wrapper mirrors; the reinterpret_cast conversions below rely on identical layout.
    using NativeType = VkPhysicalDeviceVideoMaintenance2FeaturesKHR;

    // NOTE(review): allowDuplicate / structureType are presumably consumed by the StructureChain /
    // CppType machinery defined elsewhere in vulkan.hpp — not visible in this chunk.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVideoMaintenance2FeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext comes last so the feature flag can be passed positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoMaintenance2FeaturesKHR( Bool32 videoMaintenance2_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , videoMaintenance2{ videoMaintenance2_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoMaintenance2FeaturesKHR( PhysicalDeviceVideoMaintenance2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the layout-identical wrapper.
    PhysicalDeviceVideoMaintenance2FeaturesKHR( VkPhysicalDeviceVideoMaintenance2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVideoMaintenance2FeaturesKHR( *reinterpret_cast<PhysicalDeviceVideoMaintenance2FeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceVideoMaintenance2FeaturesKHR & operator=( PhysicalDeviceVideoMaintenance2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout assumption as the converting constructor above).
    PhysicalDeviceVideoMaintenance2FeaturesKHR & operator=( VkPhysicalDeviceVideoMaintenance2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceVideoMaintenance2FeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoMaintenance2FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoMaintenance2FeaturesKHR & setVideoMaintenance2( Bool32 videoMaintenance2_ ) VULKAN_HPP_NOEXCEPT
    {
      videoMaintenance2 = videoMaintenance2_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the C struct (reference and pointer, const and non-const),
    // so the wrapper can be handed directly to the C API.
    operator VkPhysicalDeviceVideoMaintenance2FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVideoMaintenance2FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceVideoMaintenance2FeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVideoMaintenance2FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceVideoMaintenance2FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceVideoMaintenance2FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceVideoMaintenance2FeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVideoMaintenance2FeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, used below for member-wise comparison.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, videoMaintenance2 );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVideoMaintenance2FeaturesKHR const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer (shallow).
    bool operator==( PhysicalDeviceVideoMaintenance2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoMaintenance2 == rhs.videoMaintenance2 );
#  endif
    }

    bool operator!=( PhysicalDeviceVideoMaintenance2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members match the C struct in declaration order; sType identifies this struct in a pNext chain.
    StructureType sType             = StructureType::ePhysicalDeviceVideoMaintenance2FeaturesKHR;
    void *        pNext             = {};
    Bool32        videoMaintenance2 = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkPhysicalDeviceVideoMaintenance2FeaturesKHR to its C++ wrapper via CppType<...>::Type.
  template <>
  struct CppType<VkPhysicalDeviceVideoMaintenance2FeaturesKHR>
  {
    using Type = PhysicalDeviceVideoMaintenance2FeaturesKHR;
  };
#endif

  // Maps the sType enumerant back to the wrapper type (presumably used by pNext-chain utilities elsewhere in vulkan.hpp).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVideoMaintenance2FeaturesKHR>
  {
    using Type = PhysicalDeviceVideoMaintenance2FeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceVulkan11Features, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkan11Features.html
  struct PhysicalDeviceVulkan11Features
  {
    using NativeType = VkPhysicalDeviceVulkan11Features;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVulkan11Features;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( Bool32 storageBuffer16BitAccess_           = {},
                                                         Bool32 uniformAndStorageBuffer16BitAccess_ = {},
                                                         Bool32 storagePushConstant16_              = {},
                                                         Bool32 storageInputOutput16_               = {},
                                                         Bool32 multiview_                          = {},
                                                         Bool32 multiviewGeometryShader_            = {},
                                                         Bool32 multiviewTessellationShader_        = {},
                                                         Bool32 variablePointersStorageBuffer_      = {},
                                                         Bool32 variablePointers_                   = {},
                                                         Bool32 protectedMemory_                    = {},
                                                         Bool32 samplerYcbcrConversion_             = {},
                                                         Bool32 shaderDrawParameters_               = {},
                                                         void * pNext_                              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , storageBuffer16BitAccess{ storageBuffer16BitAccess_ }
      , uniformAndStorageBuffer16BitAccess{ uniformAndStorageBuffer16BitAccess_ }
      , storagePushConstant16{ storagePushConstant16_ }
      , storageInputOutput16{ storageInputOutput16_ }
      , multiview{ multiview_ }
      , multiviewGeometryShader{ multiviewGeometryShader_ }
      , multiviewTessellationShader{ multiviewTessellationShader_ }
      , variablePointersStorageBuffer{ variablePointersStorageBuffer_ }
      , variablePointers{ variablePointers_ }
      , protectedMemory{ protectedMemory_ }
      , samplerYcbcrConversion{ samplerYcbcrConversion_ }
      , shaderDrawParameters{ shaderDrawParameters_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVulkan11Features( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVulkan11Features( *reinterpret_cast<PhysicalDeviceVulkan11Features const *>( &rhs ) )
    {
    }

    PhysicalDeviceVulkan11Features & operator=( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceVulkan11Features & operator=( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceVulkan11Features const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each stores its argument into the member of the same name and
    // returns *this, so feature flags can be configured in one chained expression.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { this->pNext = pNext_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setStorageBuffer16BitAccess( Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT { this->storageBuffer16BitAccess = storageBuffer16BitAccess_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
      setUniformAndStorageBuffer16BitAccess( Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT { this->uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setStoragePushConstant16( Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT { this->storagePushConstant16 = storagePushConstant16_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setStorageInputOutput16( Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT { this->storageInputOutput16 = storageInputOutput16_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setMultiview( Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT { this->multiview = multiview_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setMultiviewGeometryShader( Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT { this->multiviewGeometryShader = multiviewGeometryShader_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setMultiviewTessellationShader( Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT { this->multiviewTessellationShader = multiviewTessellationShader_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setVariablePointersStorageBuffer( Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT { this->variablePointersStorageBuffer = variablePointersStorageBuffer_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setVariablePointers( Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT { this->variablePointers = variablePointers_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setProtectedMemory( Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT { this->protectedMemory = protectedMemory_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setSamplerYcbcrConversion( Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT { this->samplerYcbcrConversion = samplerYcbcrConversion_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setShaderDrawParameters( Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT { this->shaderDrawParameters = shaderDrawParameters_; return *this; }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer, so the wrapper
    // can be handed directly to C Vulkan entry points; relies on the identical memory layout.
    operator VkPhysicalDeviceVulkan11Features const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVulkan11Features *>( this );
    }

    operator VkPhysicalDeviceVulkan11Features &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVulkan11Features *>( this );
    }

    operator VkPhysicalDeviceVulkan11Features const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceVulkan11Features *>( this );
    }

    operator VkPhysicalDeviceVulkan11Features *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVulkan11Features *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to every member (sType and pNext included),
    // so generic code can compare or hash the struct member-wise via std::tie.
    std::tuple<StructureType const &,
               void * const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       storageBuffer16BitAccess,
                       uniformAndStorageBuffer16BitAccess,
                       storagePushConstant16,
                       storageInputOutput16,
                       multiview,
                       multiviewGeometryShader,
                       multiviewTessellationShader,
                       variablePointersStorageBuffer,
                       variablePointers,
                       protectedMemory,
                       samplerYcbcrConversion,
                       shaderDrawParameters );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20 path: defaulted three-way comparison generates ==, !=, <, <=, >, >= member-wise.
    auto operator<=>( PhysicalDeviceVulkan11Features const & ) const = default;
#else
    // Pre-C++20 path: member-wise equality, via reflect() when reflection is enabled,
    // otherwise by comparing each field explicitly (pNext compared as a raw pointer).
    bool operator==( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) &&
             ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) && ( storagePushConstant16 == rhs.storagePushConstant16 ) &&
             ( storageInputOutput16 == rhs.storageInputOutput16 ) && ( multiview == rhs.multiview ) &&
             ( multiviewGeometryShader == rhs.multiviewGeometryShader ) && ( multiviewTessellationShader == rhs.multiviewTessellationShader ) &&
             ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) && ( variablePointers == rhs.variablePointers ) &&
             ( protectedMemory == rhs.protectedMemory ) && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion ) &&
             ( shaderDrawParameters == rhs.shaderDrawParameters );
#  endif
    }

    bool operator!=( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPhysicalDeviceVulkan11Features exactly; the
    // reinterpret_cast conversions above depend on this layout — do not reorder.
    // sType is pre-set to the correct enumerant so the struct is pNext-chainable as-is.
    StructureType sType                              = StructureType::ePhysicalDeviceVulkan11Features;
    void *        pNext                              = {};
    Bool32        storageBuffer16BitAccess           = {};
    Bool32        uniformAndStorageBuffer16BitAccess = {};
    Bool32        storagePushConstant16              = {};
    Bool32        storageInputOutput16               = {};
    Bool32        multiview                          = {};
    Bool32        multiviewGeometryShader            = {};
    Bool32        multiviewTessellationShader        = {};
    Bool32        variablePointersStorageBuffer      = {};
    Bool32        variablePointers                   = {};
    Bool32        protectedMemory                    = {};
    Bool32        samplerYcbcrConversion             = {};
    Bool32        shaderDrawParameters               = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for generic (template) code.
  template <>
  struct CppType<VkPhysicalDeviceVulkan11Features>
  {
    using Type = PhysicalDeviceVulkan11Features;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type (used e.g. by StructureChain).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Features>
  {
    using Type = PhysicalDeviceVulkan11Features;
  };

  // wrapper struct for struct VkPhysicalDeviceVulkan11Properties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkan11Properties.html
  struct PhysicalDeviceVulkan11Properties
  {
    using NativeType = VkPhysicalDeviceVulkan11Properties;

    // This struct may appear at most once in a pNext chain, and carries this sType value.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVulkan11Properties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: every member defaulted; pNext comes last so members can be set positionally.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( std::array<uint8_t, VK_UUID_SIZE> const & deviceUUID_                        = {},
                                                              std::array<uint8_t, VK_UUID_SIZE> const & driverUUID_                        = {},
                                                              std::array<uint8_t, VK_LUID_SIZE> const & deviceLUID_                        = {},
                                                              uint32_t                                  deviceNodeMask_                    = {},
                                                              Bool32                                    deviceLUIDValid_                   = {},
                                                              uint32_t                                  subgroupSize_                      = {},
                                                              ShaderStageFlags                          subgroupSupportedStages_           = {},
                                                              SubgroupFeatureFlags                      subgroupSupportedOperations_       = {},
                                                              Bool32                                    subgroupQuadOperationsInAllStages_ = {},
                                                              PointClippingBehavior pointClippingBehavior_     = PointClippingBehavior::eAllClipPlanes,
                                                              uint32_t              maxMultiviewViewCount_     = {},
                                                              uint32_t              maxMultiviewInstanceIndex_ = {},
                                                              Bool32                protectedNoFault_          = {},
                                                              uint32_t              maxPerSetDescriptors_      = {},
                                                              DeviceSize            maxMemoryAllocationSize_   = {},
                                                              void *                pNext_                     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , deviceUUID{ deviceUUID_ }
      , driverUUID{ driverUUID_ }
      , deviceLUID{ deviceLUID_ }
      , deviceNodeMask{ deviceNodeMask_ }
      , deviceLUIDValid{ deviceLUIDValid_ }
      , subgroupSize{ subgroupSize_ }
      , subgroupSupportedStages{ subgroupSupportedStages_ }
      , subgroupSupportedOperations{ subgroupSupportedOperations_ }
      , subgroupQuadOperationsInAllStages{ subgroupQuadOperationsInAllStages_ }
      , pointClippingBehavior{ pointClippingBehavior_ }
      , maxMultiviewViewCount{ maxMultiviewViewCount_ }
      , maxMultiviewInstanceIndex{ maxMultiviewInstanceIndex_ }
      , protectedNoFault{ protectedNoFault_ }
      , maxPerSetDescriptors{ maxPerSetDescriptors_ }
      , maxMemoryAllocationSize{ maxMemoryAllocationSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct: layout-compatible, so reinterpret and copy.
    PhysicalDeviceVulkan11Properties( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVulkan11Properties( *reinterpret_cast<PhysicalDeviceVulkan11Properties const *>( &rhs ) )
    {
    }

    PhysicalDeviceVulkan11Properties & operator=( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; valid because the wrapper has the exact same memory layout.
    PhysicalDeviceVulkan11Properties & operator=( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceVulkan11Properties const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type (reference and pointer) for passing to C entry points.
    operator VkPhysicalDeviceVulkan11Properties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVulkan11Properties *>( this );
    }

    operator VkPhysicalDeviceVulkan11Properties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVulkan11Properties *>( this );
    }

    operator VkPhysicalDeviceVulkan11Properties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceVulkan11Properties *>( this );
    }

    operator VkPhysicalDeviceVulkan11Properties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVulkan11Properties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, for member-wise comparison/hashing.
    std::tuple<StructureType const &,
               void * const &,
               ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &,
               ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &,
               ArrayWrapper1D<uint8_t, VK_LUID_SIZE> const &,
               uint32_t const &,
               Bool32 const &,
               uint32_t const &,
               ShaderStageFlags const &,
               SubgroupFeatureFlags const &,
               Bool32 const &,
               PointClippingBehavior const &,
               uint32_t const &,
               uint32_t const &,
               Bool32 const &,
               uint32_t const &,
               DeviceSize const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       deviceUUID,
                       driverUUID,
                       deviceLUID,
                       deviceNodeMask,
                       deviceLUIDValid,
                       subgroupSize,
                       subgroupSupportedStages,
                       subgroupSupportedOperations,
                       subgroupQuadOperationsInAllStages,
                       pointClippingBehavior,
                       maxMultiviewViewCount,
                       maxMultiviewInstanceIndex,
                       protectedNoFault,
                       maxPerSetDescriptors,
                       maxMemoryAllocationSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVulkan11Properties const & ) const = default;
#else
    // Member-wise equality when <=> is unavailable; pNext compared as a raw pointer.
    bool operator==( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) && ( driverUUID == rhs.driverUUID ) &&
             ( deviceLUID == rhs.deviceLUID ) && ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid ) &&
             ( subgroupSize == rhs.subgroupSize ) && ( subgroupSupportedStages == rhs.subgroupSupportedStages ) &&
             ( subgroupSupportedOperations == rhs.subgroupSupportedOperations ) &&
             ( subgroupQuadOperationsInAllStages == rhs.subgroupQuadOperationsInAllStages ) && ( pointClippingBehavior == rhs.pointClippingBehavior ) &&
             ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ) &&
             ( protectedNoFault == rhs.protectedNoFault ) && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) &&
             ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
#  endif
    }

    bool operator!=( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order/types mirror VkPhysicalDeviceVulkan11Properties exactly; the
    // reinterpret_cast conversions above depend on this layout — do not reorder.
    StructureType                         sType                             = StructureType::ePhysicalDeviceVulkan11Properties;
    void *                                pNext                             = {};
    ArrayWrapper1D<uint8_t, VK_UUID_SIZE> deviceUUID                        = {};
    ArrayWrapper1D<uint8_t, VK_UUID_SIZE> driverUUID                        = {};
    ArrayWrapper1D<uint8_t, VK_LUID_SIZE> deviceLUID                        = {};
    uint32_t                              deviceNodeMask                    = {};
    Bool32                                deviceLUIDValid                   = {};
    uint32_t                              subgroupSize                      = {};
    ShaderStageFlags                      subgroupSupportedStages           = {};
    SubgroupFeatureFlags                  subgroupSupportedOperations       = {};
    Bool32                                subgroupQuadOperationsInAllStages = {};
    PointClippingBehavior                 pointClippingBehavior             = PointClippingBehavior::eAllClipPlanes;
    uint32_t                              maxMultiviewViewCount             = {};
    uint32_t                              maxMultiviewInstanceIndex         = {};
    Bool32                                protectedNoFault                  = {};
    uint32_t                              maxPerSetDescriptors              = {};
    DeviceSize                            maxMemoryAllocationSize           = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for generic (template) code.
  template <>
  struct CppType<VkPhysicalDeviceVulkan11Properties>
  {
    using Type = PhysicalDeviceVulkan11Properties;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper type (used e.g. by StructureChain).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Properties>
  {
    using Type = PhysicalDeviceVulkan11Properties;
  };

  // wrapper struct for struct VkPhysicalDeviceVulkan12Features, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkan12Features.html
  struct PhysicalDeviceVulkan12Features
  {
    using NativeType = VkPhysicalDeviceVulkan12Features;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVulkan12Features;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( Bool32 samplerMirrorClampToEdge_                           = {},
                                                         Bool32 drawIndirectCount_                                  = {},
                                                         Bool32 storageBuffer8BitAccess_                            = {},
                                                         Bool32 uniformAndStorageBuffer8BitAccess_                  = {},
                                                         Bool32 storagePushConstant8_                               = {},
                                                         Bool32 shaderBufferInt64Atomics_                           = {},
                                                         Bool32 shaderSharedInt64Atomics_                           = {},
                                                         Bool32 shaderFloat16_                                      = {},
                                                         Bool32 shaderInt8_                                         = {},
                                                         Bool32 descriptorIndexing_                                 = {},
                                                         Bool32 shaderInputAttachmentArrayDynamicIndexing_          = {},
                                                         Bool32 shaderUniformTexelBufferArrayDynamicIndexing_       = {},
                                                         Bool32 shaderStorageTexelBufferArrayDynamicIndexing_       = {},
                                                         Bool32 shaderUniformBufferArrayNonUniformIndexing_         = {},
                                                         Bool32 shaderSampledImageArrayNonUniformIndexing_          = {},
                                                         Bool32 shaderStorageBufferArrayNonUniformIndexing_         = {},
                                                         Bool32 shaderStorageImageArrayNonUniformIndexing_          = {},
                                                         Bool32 shaderInputAttachmentArrayNonUniformIndexing_       = {},
                                                         Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_    = {},
                                                         Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_    = {},
                                                         Bool32 descriptorBindingUniformBufferUpdateAfterBind_      = {},
                                                         Bool32 descriptorBindingSampledImageUpdateAfterBind_       = {},
                                                         Bool32 descriptorBindingStorageImageUpdateAfterBind_       = {},
                                                         Bool32 descriptorBindingStorageBufferUpdateAfterBind_      = {},
                                                         Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {},
                                                         Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {},
                                                         Bool32 descriptorBindingUpdateUnusedWhilePending_          = {},
                                                         Bool32 descriptorBindingPartiallyBound_                    = {},
                                                         Bool32 descriptorBindingVariableDescriptorCount_           = {},
                                                         Bool32 runtimeDescriptorArray_                             = {},
                                                         Bool32 samplerFilterMinmax_                                = {},
                                                         Bool32 scalarBlockLayout_                                  = {},
                                                         Bool32 imagelessFramebuffer_                               = {},
                                                         Bool32 uniformBufferStandardLayout_                        = {},
                                                         Bool32 shaderSubgroupExtendedTypes_                        = {},
                                                         Bool32 separateDepthStencilLayouts_                        = {},
                                                         Bool32 hostQueryReset_                                     = {},
                                                         Bool32 timelineSemaphore_                                  = {},
                                                         Bool32 bufferDeviceAddress_                                = {},
                                                         Bool32 bufferDeviceAddressCaptureReplay_                   = {},
                                                         Bool32 bufferDeviceAddressMultiDevice_                     = {},
                                                         Bool32 vulkanMemoryModel_                                  = {},
                                                         Bool32 vulkanMemoryModelDeviceScope_                       = {},
                                                         Bool32 vulkanMemoryModelAvailabilityVisibilityChains_      = {},
                                                         Bool32 shaderOutputViewportIndex_                          = {},
                                                         Bool32 shaderOutputLayer_                                  = {},
                                                         Bool32 subgroupBroadcastDynamicId_                         = {},
                                                         void * pNext_                                              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , samplerMirrorClampToEdge{ samplerMirrorClampToEdge_ }
      , drawIndirectCount{ drawIndirectCount_ }
      , storageBuffer8BitAccess{ storageBuffer8BitAccess_ }
      , uniformAndStorageBuffer8BitAccess{ uniformAndStorageBuffer8BitAccess_ }
      , storagePushConstant8{ storagePushConstant8_ }
      , shaderBufferInt64Atomics{ shaderBufferInt64Atomics_ }
      , shaderSharedInt64Atomics{ shaderSharedInt64Atomics_ }
      , shaderFloat16{ shaderFloat16_ }
      , shaderInt8{ shaderInt8_ }
      , descriptorIndexing{ descriptorIndexing_ }
      , shaderInputAttachmentArrayDynamicIndexing{ shaderInputAttachmentArrayDynamicIndexing_ }
      , shaderUniformTexelBufferArrayDynamicIndexing{ shaderUniformTexelBufferArrayDynamicIndexing_ }
      , shaderStorageTexelBufferArrayDynamicIndexing{ shaderStorageTexelBufferArrayDynamicIndexing_ }
      , shaderUniformBufferArrayNonUniformIndexing{ shaderUniformBufferArrayNonUniformIndexing_ }
      , shaderSampledImageArrayNonUniformIndexing{ shaderSampledImageArrayNonUniformIndexing_ }
      , shaderStorageBufferArrayNonUniformIndexing{ shaderStorageBufferArrayNonUniformIndexing_ }
      , shaderStorageImageArrayNonUniformIndexing{ shaderStorageImageArrayNonUniformIndexing_ }
      , shaderInputAttachmentArrayNonUniformIndexing{ shaderInputAttachmentArrayNonUniformIndexing_ }
      , shaderUniformTexelBufferArrayNonUniformIndexing{ shaderUniformTexelBufferArrayNonUniformIndexing_ }
      , shaderStorageTexelBufferArrayNonUniformIndexing{ shaderStorageTexelBufferArrayNonUniformIndexing_ }
      , descriptorBindingUniformBufferUpdateAfterBind{ descriptorBindingUniformBufferUpdateAfterBind_ }
      , descriptorBindingSampledImageUpdateAfterBind{ descriptorBindingSampledImageUpdateAfterBind_ }
      , descriptorBindingStorageImageUpdateAfterBind{ descriptorBindingStorageImageUpdateAfterBind_ }
      , descriptorBindingStorageBufferUpdateAfterBind{ descriptorBindingStorageBufferUpdateAfterBind_ }
      , descriptorBindingUniformTexelBufferUpdateAfterBind{ descriptorBindingUniformTexelBufferUpdateAfterBind_ }
      , descriptorBindingStorageTexelBufferUpdateAfterBind{ descriptorBindingStorageTexelBufferUpdateAfterBind_ }
      , descriptorBindingUpdateUnusedWhilePending{ descriptorBindingUpdateUnusedWhilePending_ }
      , descriptorBindingPartiallyBound{ descriptorBindingPartiallyBound_ }
      , descriptorBindingVariableDescriptorCount{ descriptorBindingVariableDescriptorCount_ }
      , runtimeDescriptorArray{ runtimeDescriptorArray_ }
      , samplerFilterMinmax{ samplerFilterMinmax_ }
      , scalarBlockLayout{ scalarBlockLayout_ }
      , imagelessFramebuffer{ imagelessFramebuffer_ }
      , uniformBufferStandardLayout{ uniformBufferStandardLayout_ }
      , shaderSubgroupExtendedTypes{ shaderSubgroupExtendedTypes_ }
      , separateDepthStencilLayouts{ separateDepthStencilLayouts_ }
      , hostQueryReset{ hostQueryReset_ }
      , timelineSemaphore{ timelineSemaphore_ }
      , bufferDeviceAddress{ bufferDeviceAddress_ }
      , bufferDeviceAddressCaptureReplay{ bufferDeviceAddressCaptureReplay_ }
      , bufferDeviceAddressMultiDevice{ bufferDeviceAddressMultiDevice_ }
      , vulkanMemoryModel{ vulkanMemoryModel_ }
      , vulkanMemoryModelDeviceScope{ vulkanMemoryModelDeviceScope_ }
      , vulkanMemoryModelAvailabilityVisibilityChains{ vulkanMemoryModelAvailabilityVisibilityChains_ }
      , shaderOutputViewportIndex{ shaderOutputViewportIndex_ }
      , shaderOutputLayer{ shaderOutputLayer_ }
      , subgroupBroadcastDynamicId{ subgroupBroadcastDynamicId_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVulkan12Features( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVulkan12Features( *reinterpret_cast<PhysicalDeviceVulkan12Features const *>( &rhs ) )
    {
    }

    PhysicalDeviceVulkan12Features & operator=( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PhysicalDeviceVulkan12Features & operator=( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceVulkan12Features const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters (generated, one per member): each assigns the corresponding
    // member and returns *this so calls can be chained when building the struct.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setSamplerMirrorClampToEdge( Bool32 samplerMirrorClampToEdge_ ) VULKAN_HPP_NOEXCEPT
    {
      samplerMirrorClampToEdge = samplerMirrorClampToEdge_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDrawIndirectCount( Bool32 drawIndirectCount_ ) VULKAN_HPP_NOEXCEPT
    {
      drawIndirectCount = drawIndirectCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setStorageBuffer8BitAccess( Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      storageBuffer8BitAccess = storageBuffer8BitAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
      setUniformAndStorageBuffer8BitAccess( Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setStoragePushConstant8( Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
    {
      storagePushConstant8 = storagePushConstant8_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderBufferInt64Atomics( Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderSharedInt64Atomics( Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderFloat16( Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderFloat16 = shaderFloat16_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderInt8( Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderInt8 = shaderInt8_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorIndexing( Bool32 descriptorIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorIndexing = descriptorIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
      setShaderInputAttachmentArrayDynamicIndexing( Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
      setShaderUniformTexelBufferArrayDynamicIndexing( Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
      setShaderStorageTexelBufferArrayDynamicIndexing( Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
      setShaderUniformBufferArrayNonUniformIndexing( Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
      setShaderSampledImageArrayNonUniformIndexing( Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
      setShaderStorageBufferArrayNonUniformIndexing( Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
      setShaderStorageImageArrayNonUniformIndexing( Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
      setShaderInputAttachmentArrayNonUniformIndexing( Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
      setShaderUniformTexelBufferArrayNonUniformIndexing( Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
      setShaderStorageTexelBufferArrayNonUniformIndexing( Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
      setDescriptorBindingUniformBufferUpdateAfterBind( Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
      setDescriptorBindingSampledImageUpdateAfterBind( Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
      setDescriptorBindingStorageImageUpdateAfterBind( Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
      setDescriptorBindingStorageBufferUpdateAfterBind( Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
      setDescriptorBindingUniformTexelBufferUpdateAfterBind( Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
      setDescriptorBindingStorageTexelBufferUpdateAfterBind( Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
      setDescriptorBindingUpdateUnusedWhilePending( Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingPartiallyBound( Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
      setDescriptorBindingVariableDescriptorCount( Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setRuntimeDescriptorArray( Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
    {
      runtimeDescriptorArray = runtimeDescriptorArray_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setSamplerFilterMinmax( Bool32 samplerFilterMinmax_ ) VULKAN_HPP_NOEXCEPT
    {
      samplerFilterMinmax = samplerFilterMinmax_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setScalarBlockLayout( Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      scalarBlockLayout = scalarBlockLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setImagelessFramebuffer( Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      imagelessFramebuffer = imagelessFramebuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setUniformBufferStandardLayout( Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      uniformBufferStandardLayout = uniformBufferStandardLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderSubgroupExtendedTypes( Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setSeparateDepthStencilLayouts( Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      separateDepthStencilLayouts = separateDepthStencilLayouts_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setHostQueryReset( Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
    {
      hostQueryReset = hostQueryReset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setTimelineSemaphore( Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      timelineSemaphore = timelineSemaphore_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setBufferDeviceAddress( Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferDeviceAddress = bufferDeviceAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setBufferDeviceAddressCaptureReplay( Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setBufferDeviceAddressMultiDevice( Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setVulkanMemoryModel( Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
    {
      vulkanMemoryModel = vulkanMemoryModel_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setVulkanMemoryModelDeviceScope( Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
    {
      vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
      setVulkanMemoryModelAvailabilityVisibilityChains( Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
    {
      vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderOutputViewportIndex( Bool32 shaderOutputViewportIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderOutputViewportIndex = shaderOutputViewportIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderOutputLayer( Bool32 shaderOutputLayer_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderOutputLayer = shaderOutputLayer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setSubgroupBroadcastDynamicId( Bool32 subgroupBroadcastDynamicId_ ) VULKAN_HPP_NOEXCEPT
    {
      subgroupBroadcastDynamicId = subgroupBroadcastDynamicId_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversion operators to the native C type. These rely on the wrapper being
    // layout-identical to VkPhysicalDeviceVulkan12Features, so a reinterpret_cast of
    // `this` yields a valid view of the same object for passing to the C API.
    operator VkPhysicalDeviceVulkan12Features const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVulkan12Features *>( this );
    }

    operator VkPhysicalDeviceVulkan12Features &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVulkan12Features *>( this );
    }

    // Pointer conversions, convenient for APIs taking VkPhysicalDeviceVulkan12Features *.
    operator VkPhysicalDeviceVulkan12Features const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceVulkan12Features *>( this );
    }

    operator VkPhysicalDeviceVulkan12Features *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVulkan12Features *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns const references to all members (sType, pNext, then the 47 Bool32
    // feature flags) as a tuple, in declaration order. operator== below uses this
    // tuple for a reflection-based memberwise comparison; the tuple entry order
    // must therefore stay in sync with the member declarations.
    std::tuple<StructureType const &,
               void * const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       samplerMirrorClampToEdge,
                       drawIndirectCount,
                       storageBuffer8BitAccess,
                       uniformAndStorageBuffer8BitAccess,
                       storagePushConstant8,
                       shaderBufferInt64Atomics,
                       shaderSharedInt64Atomics,
                       shaderFloat16,
                       shaderInt8,
                       descriptorIndexing,
                       shaderInputAttachmentArrayDynamicIndexing,
                       shaderUniformTexelBufferArrayDynamicIndexing,
                       shaderStorageTexelBufferArrayDynamicIndexing,
                       shaderUniformBufferArrayNonUniformIndexing,
                       shaderSampledImageArrayNonUniformIndexing,
                       shaderStorageBufferArrayNonUniformIndexing,
                       shaderStorageImageArrayNonUniformIndexing,
                       shaderInputAttachmentArrayNonUniformIndexing,
                       shaderUniformTexelBufferArrayNonUniformIndexing,
                       shaderStorageTexelBufferArrayNonUniformIndexing,
                       descriptorBindingUniformBufferUpdateAfterBind,
                       descriptorBindingSampledImageUpdateAfterBind,
                       descriptorBindingStorageImageUpdateAfterBind,
                       descriptorBindingStorageBufferUpdateAfterBind,
                       descriptorBindingUniformTexelBufferUpdateAfterBind,
                       descriptorBindingStorageTexelBufferUpdateAfterBind,
                       descriptorBindingUpdateUnusedWhilePending,
                       descriptorBindingPartiallyBound,
                       descriptorBindingVariableDescriptorCount,
                       runtimeDescriptorArray,
                       samplerFilterMinmax,
                       scalarBlockLayout,
                       imagelessFramebuffer,
                       uniformBufferStandardLayout,
                       shaderSubgroupExtendedTypes,
                       separateDepthStencilLayouts,
                       hostQueryReset,
                       timelineSemaphore,
                       bufferDeviceAddress,
                       bufferDeviceAddressCaptureReplay,
                       bufferDeviceAddressMultiDevice,
                       vulkanMemoryModel,
                       vulkanMemoryModelDeviceScope,
                       vulkanMemoryModelAvailabilityVisibilityChains,
                       shaderOutputViewportIndex,
                       shaderOutputLayer,
                       subgroupBroadcastDynamicId );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // With C++20 three-way comparison available, the defaulted operator<=> derives
    // ==, !=, <, <=, >, >= from a memberwise comparison in declaration order.
    auto operator<=>( PhysicalDeviceVulkan12Features const & ) const = default;
#else
    // Memberwise equality over every field, including sType and pNext.
    bool operator==( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      // Compare via the reflection tuple to avoid repeating the member list.
      return this->reflect() == rhs.reflect();
#  else
      // Explicit comparison chain; must cover the same members in the same order
      // as the declarations below.
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( samplerMirrorClampToEdge == rhs.samplerMirrorClampToEdge ) &&
             ( drawIndirectCount == rhs.drawIndirectCount ) && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) &&
             ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) && ( storagePushConstant8 == rhs.storagePushConstant8 ) &&
             ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) && ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics ) &&
             ( shaderFloat16 == rhs.shaderFloat16 ) && ( shaderInt8 == rhs.shaderInt8 ) && ( descriptorIndexing == rhs.descriptorIndexing ) &&
             ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) &&
             ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) &&
             ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) &&
             ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) &&
             ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) &&
             ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) &&
             ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) &&
             ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) &&
             ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) &&
             ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) &&
             ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) &&
             ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) &&
             ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) &&
             ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) &&
             ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) &&
             ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) &&
             ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) &&
             ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) &&
             ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) &&
             ( runtimeDescriptorArray == rhs.runtimeDescriptorArray ) && ( samplerFilterMinmax == rhs.samplerFilterMinmax ) &&
             ( scalarBlockLayout == rhs.scalarBlockLayout ) && ( imagelessFramebuffer == rhs.imagelessFramebuffer ) &&
             ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ) && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes ) &&
             ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts ) && ( hostQueryReset == rhs.hostQueryReset ) &&
             ( timelineSemaphore == rhs.timelineSemaphore ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) &&
             ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) &&
             ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ) && ( vulkanMemoryModel == rhs.vulkanMemoryModel ) &&
             ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) &&
             ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains ) &&
             ( shaderOutputViewportIndex == rhs.shaderOutputViewportIndex ) && ( shaderOutputLayer == rhs.shaderOutputLayer ) &&
             ( subgroupBroadcastDynamicId == rhs.subgroupBroadcastDynamicId );
#  endif
    }

    // Inequality is simply the negation of memberwise equality.
    bool operator!=( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    // Data members mirror VkPhysicalDeviceVulkan12Features field-for-field, in the
    // same order, so the reinterpret_cast-based conversions above are valid.
    // sType is fixed to ePhysicalDeviceVulkan12Features; all feature flags default
    // to VK_FALSE ({}).
    StructureType sType                                              = StructureType::ePhysicalDeviceVulkan12Features;
    void *        pNext                                              = {};
    Bool32        samplerMirrorClampToEdge                           = {};
    Bool32        drawIndirectCount                                  = {};
    Bool32        storageBuffer8BitAccess                            = {};
    Bool32        uniformAndStorageBuffer8BitAccess                  = {};
    Bool32        storagePushConstant8                               = {};
    Bool32        shaderBufferInt64Atomics                           = {};
    Bool32        shaderSharedInt64Atomics                           = {};
    Bool32        shaderFloat16                                      = {};
    Bool32        shaderInt8                                         = {};
    Bool32        descriptorIndexing                                 = {};
    Bool32        shaderInputAttachmentArrayDynamicIndexing          = {};
    Bool32        shaderUniformTexelBufferArrayDynamicIndexing       = {};
    Bool32        shaderStorageTexelBufferArrayDynamicIndexing       = {};
    Bool32        shaderUniformBufferArrayNonUniformIndexing         = {};
    Bool32        shaderSampledImageArrayNonUniformIndexing          = {};
    Bool32        shaderStorageBufferArrayNonUniformIndexing         = {};
    Bool32        shaderStorageImageArrayNonUniformIndexing          = {};
    Bool32        shaderInputAttachmentArrayNonUniformIndexing       = {};
    Bool32        shaderUniformTexelBufferArrayNonUniformIndexing    = {};
    Bool32        shaderStorageTexelBufferArrayNonUniformIndexing    = {};
    Bool32        descriptorBindingUniformBufferUpdateAfterBind      = {};
    Bool32        descriptorBindingSampledImageUpdateAfterBind       = {};
    Bool32        descriptorBindingStorageImageUpdateAfterBind       = {};
    Bool32        descriptorBindingStorageBufferUpdateAfterBind      = {};
    Bool32        descriptorBindingUniformTexelBufferUpdateAfterBind = {};
    Bool32        descriptorBindingStorageTexelBufferUpdateAfterBind = {};
    Bool32        descriptorBindingUpdateUnusedWhilePending          = {};
    Bool32        descriptorBindingPartiallyBound                    = {};
    Bool32        descriptorBindingVariableDescriptorCount           = {};
    Bool32        runtimeDescriptorArray                             = {};
    Bool32        samplerFilterMinmax                                = {};
    Bool32        scalarBlockLayout                                  = {};
    Bool32        imagelessFramebuffer                               = {};
    Bool32        uniformBufferStandardLayout                        = {};
    Bool32        shaderSubgroupExtendedTypes                        = {};
    Bool32        separateDepthStencilLayouts                        = {};
    Bool32        hostQueryReset                                     = {};
    Bool32        timelineSemaphore                                  = {};
    Bool32        bufferDeviceAddress                                = {};
    Bool32        bufferDeviceAddressCaptureReplay                   = {};
    Bool32        bufferDeviceAddressMultiDevice                     = {};
    Bool32        vulkanMemoryModel                                  = {};
    Bool32        vulkanMemoryModelDeviceScope                       = {};
    Bool32        vulkanMemoryModelAvailabilityVisibilityChains      = {};
    Bool32        shaderOutputViewportIndex                          = {};
    Bool32        shaderOutputLayer                                  = {};
    Bool32        subgroupBroadcastDynamicId                         = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceVulkan12Features>
  {
    using Type = PhysicalDeviceVulkan12Features;
  };
#endif

  // Trait mapping the StructureType enum value to the wrapper type, used for
  // sType-driven lookups (e.g. in structure chains).
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Features>
  {
    using Type = PhysicalDeviceVulkan12Features;
  };

  // wrapper struct for struct VkPhysicalDeviceVulkan12Properties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkan12Properties.html
  struct PhysicalDeviceVulkan12Properties
  {
    using NativeType = VkPhysicalDeviceVulkan12Properties;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVulkan12Properties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: one parameter per native struct member, in declaration order.
    // All members default to zero-initialization (enums to their first enumerator), pNext to nullptr.
    VULKAN_HPP_CONSTEXPR_14
      PhysicalDeviceVulkan12Properties( DriverId                                          driverID_              = DriverId::eAmdProprietary,
                                        std::array<char, VK_MAX_DRIVER_NAME_SIZE> const & driverName_            = {},
                                        std::array<char, VK_MAX_DRIVER_INFO_SIZE> const & driverInfo_            = {},
                                        ConformanceVersion                                conformanceVersion_    = {},
                                        ShaderFloatControlsIndependence denormBehaviorIndependence_              = ShaderFloatControlsIndependence::e32BitOnly,
                                        ShaderFloatControlsIndependence roundingModeIndependence_                = ShaderFloatControlsIndependence::e32BitOnly,
                                        Bool32                          shaderSignedZeroInfNanPreserveFloat16_   = {},
                                        Bool32                          shaderSignedZeroInfNanPreserveFloat32_   = {},
                                        Bool32                          shaderSignedZeroInfNanPreserveFloat64_   = {},
                                        Bool32                          shaderDenormPreserveFloat16_             = {},
                                        Bool32                          shaderDenormPreserveFloat32_             = {},
                                        Bool32                          shaderDenormPreserveFloat64_             = {},
                                        Bool32                          shaderDenormFlushToZeroFloat16_          = {},
                                        Bool32                          shaderDenormFlushToZeroFloat32_          = {},
                                        Bool32                          shaderDenormFlushToZeroFloat64_          = {},
                                        Bool32                          shaderRoundingModeRTEFloat16_            = {},
                                        Bool32                          shaderRoundingModeRTEFloat32_            = {},
                                        Bool32                          shaderRoundingModeRTEFloat64_            = {},
                                        Bool32                          shaderRoundingModeRTZFloat16_            = {},
                                        Bool32                          shaderRoundingModeRTZFloat32_            = {},
                                        Bool32                          shaderRoundingModeRTZFloat64_            = {},
                                        uint32_t                        maxUpdateAfterBindDescriptorsInAllPools_ = {},
                                        Bool32                          shaderUniformBufferArrayNonUniformIndexingNative_     = {},
                                        Bool32                          shaderSampledImageArrayNonUniformIndexingNative_      = {},
                                        Bool32                          shaderStorageBufferArrayNonUniformIndexingNative_     = {},
                                        Bool32                          shaderStorageImageArrayNonUniformIndexingNative_      = {},
                                        Bool32                          shaderInputAttachmentArrayNonUniformIndexingNative_   = {},
                                        Bool32                          robustBufferAccessUpdateAfterBind_                    = {},
                                        Bool32                          quadDivergentImplicitLod_                             = {},
                                        uint32_t                        maxPerStageDescriptorUpdateAfterBindSamplers_         = {},
                                        uint32_t                        maxPerStageDescriptorUpdateAfterBindUniformBuffers_   = {},
                                        uint32_t                        maxPerStageDescriptorUpdateAfterBindStorageBuffers_   = {},
                                        uint32_t                        maxPerStageDescriptorUpdateAfterBindSampledImages_    = {},
                                        uint32_t                        maxPerStageDescriptorUpdateAfterBindStorageImages_    = {},
                                        uint32_t                        maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {},
                                        uint32_t                        maxPerStageUpdateAfterBindResources_                  = {},
                                        uint32_t                        maxDescriptorSetUpdateAfterBindSamplers_              = {},
                                        uint32_t                        maxDescriptorSetUpdateAfterBindUniformBuffers_        = {},
                                        uint32_t                        maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {},
                                        uint32_t                        maxDescriptorSetUpdateAfterBindStorageBuffers_        = {},
                                        uint32_t                        maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {},
                                        uint32_t                        maxDescriptorSetUpdateAfterBindSampledImages_         = {},
                                        uint32_t                        maxDescriptorSetUpdateAfterBindStorageImages_         = {},
                                        uint32_t                        maxDescriptorSetUpdateAfterBindInputAttachments_      = {},
                                        ResolveModeFlags                supportedDepthResolveModes_                           = {},
                                        ResolveModeFlags                supportedStencilResolveModes_                         = {},
                                        Bool32                          independentResolveNone_                               = {},
                                        Bool32                          independentResolve_                                   = {},
                                        Bool32                          filterMinmaxSingleComponentFormats_                   = {},
                                        Bool32                          filterMinmaxImageComponentMapping_                    = {},
                                        uint64_t                        maxTimelineSemaphoreValueDifference_                  = {},
                                        SampleCountFlags                framebufferIntegerColorSampleCounts_                  = {},
                                        void *                          pNext_                                                = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , driverID{ driverID_ }
      , driverName{ driverName_ }
      , driverInfo{ driverInfo_ }
      , conformanceVersion{ conformanceVersion_ }
      , denormBehaviorIndependence{ denormBehaviorIndependence_ }
      , roundingModeIndependence{ roundingModeIndependence_ }
      , shaderSignedZeroInfNanPreserveFloat16{ shaderSignedZeroInfNanPreserveFloat16_ }
      , shaderSignedZeroInfNanPreserveFloat32{ shaderSignedZeroInfNanPreserveFloat32_ }
      , shaderSignedZeroInfNanPreserveFloat64{ shaderSignedZeroInfNanPreserveFloat64_ }
      , shaderDenormPreserveFloat16{ shaderDenormPreserveFloat16_ }
      , shaderDenormPreserveFloat32{ shaderDenormPreserveFloat32_ }
      , shaderDenormPreserveFloat64{ shaderDenormPreserveFloat64_ }
      , shaderDenormFlushToZeroFloat16{ shaderDenormFlushToZeroFloat16_ }
      , shaderDenormFlushToZeroFloat32{ shaderDenormFlushToZeroFloat32_ }
      , shaderDenormFlushToZeroFloat64{ shaderDenormFlushToZeroFloat64_ }
      , shaderRoundingModeRTEFloat16{ shaderRoundingModeRTEFloat16_ }
      , shaderRoundingModeRTEFloat32{ shaderRoundingModeRTEFloat32_ }
      , shaderRoundingModeRTEFloat64{ shaderRoundingModeRTEFloat64_ }
      , shaderRoundingModeRTZFloat16{ shaderRoundingModeRTZFloat16_ }
      , shaderRoundingModeRTZFloat32{ shaderRoundingModeRTZFloat32_ }
      , shaderRoundingModeRTZFloat64{ shaderRoundingModeRTZFloat64_ }
      , maxUpdateAfterBindDescriptorsInAllPools{ maxUpdateAfterBindDescriptorsInAllPools_ }
      , shaderUniformBufferArrayNonUniformIndexingNative{ shaderUniformBufferArrayNonUniformIndexingNative_ }
      , shaderSampledImageArrayNonUniformIndexingNative{ shaderSampledImageArrayNonUniformIndexingNative_ }
      , shaderStorageBufferArrayNonUniformIndexingNative{ shaderStorageBufferArrayNonUniformIndexingNative_ }
      , shaderStorageImageArrayNonUniformIndexingNative{ shaderStorageImageArrayNonUniformIndexingNative_ }
      , shaderInputAttachmentArrayNonUniformIndexingNative{ shaderInputAttachmentArrayNonUniformIndexingNative_ }
      , robustBufferAccessUpdateAfterBind{ robustBufferAccessUpdateAfterBind_ }
      , quadDivergentImplicitLod{ quadDivergentImplicitLod_ }
      , maxPerStageDescriptorUpdateAfterBindSamplers{ maxPerStageDescriptorUpdateAfterBindSamplers_ }
      , maxPerStageDescriptorUpdateAfterBindUniformBuffers{ maxPerStageDescriptorUpdateAfterBindUniformBuffers_ }
      , maxPerStageDescriptorUpdateAfterBindStorageBuffers{ maxPerStageDescriptorUpdateAfterBindStorageBuffers_ }
      , maxPerStageDescriptorUpdateAfterBindSampledImages{ maxPerStageDescriptorUpdateAfterBindSampledImages_ }
      , maxPerStageDescriptorUpdateAfterBindStorageImages{ maxPerStageDescriptorUpdateAfterBindStorageImages_ }
      , maxPerStageDescriptorUpdateAfterBindInputAttachments{ maxPerStageDescriptorUpdateAfterBindInputAttachments_ }
      , maxPerStageUpdateAfterBindResources{ maxPerStageUpdateAfterBindResources_ }
      , maxDescriptorSetUpdateAfterBindSamplers{ maxDescriptorSetUpdateAfterBindSamplers_ }
      , maxDescriptorSetUpdateAfterBindUniformBuffers{ maxDescriptorSetUpdateAfterBindUniformBuffers_ }
      , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic{ maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ }
      , maxDescriptorSetUpdateAfterBindStorageBuffers{ maxDescriptorSetUpdateAfterBindStorageBuffers_ }
      , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic{ maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ }
      , maxDescriptorSetUpdateAfterBindSampledImages{ maxDescriptorSetUpdateAfterBindSampledImages_ }
      , maxDescriptorSetUpdateAfterBindStorageImages{ maxDescriptorSetUpdateAfterBindStorageImages_ }
      , maxDescriptorSetUpdateAfterBindInputAttachments{ maxDescriptorSetUpdateAfterBindInputAttachments_ }
      , supportedDepthResolveModes{ supportedDepthResolveModes_ }
      , supportedStencilResolveModes{ supportedStencilResolveModes_ }
      , independentResolveNone{ independentResolveNone_ }
      , independentResolve{ independentResolve_ }
      , filterMinmaxSingleComponentFormats{ filterMinmaxSingleComponentFormats_ }
      , framebufferIntegerColorSampleCounts{ framebufferIntegerColorSampleCounts_ }
    {
    }

    // Defaulted copy constructor (the struct holds only trivially copyable members).
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; valid because the wrapper is layout-compatible
    // with VkPhysicalDeviceVulkan12Properties (same members, same order, standard layout).
    PhysicalDeviceVulkan12Properties( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVulkan12Properties( *reinterpret_cast<PhysicalDeviceVulkan12Properties const *>( &rhs ) )
    {
    }

    // Defaulted copy assignment.
    PhysicalDeviceVulkan12Properties & operator=( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct; relies on the wrapper being layout-compatible
    // with VkPhysicalDeviceVulkan12Properties, so the reinterpret_cast is well-defined here.
    PhysicalDeviceVulkan12Properties & operator=( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceVulkan12Properties const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native Vulkan type (by reference and by pointer, const and
    // non-const), so the wrapper can be passed directly to the C API. All four rely on the
    // wrapper being layout-compatible with VkPhysicalDeviceVulkan12Properties.
    operator VkPhysicalDeviceVulkan12Properties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVulkan12Properties *>( this );
    }

    operator VkPhysicalDeviceVulkan12Properties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVulkan12Properties *>( this );
    }

    operator VkPhysicalDeviceVulkan12Properties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceVulkan12Properties *>( this );
    }

    operator VkPhysicalDeviceVulkan12Properties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVulkan12Properties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: returns a tuple of const references to every member, in declaration
    // order (sType first). The tuple element types must stay in sync with the member list below.
    std::tuple<StructureType const &,
               void * const &,
               DriverId const &,
               ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> const &,
               ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> const &,
               ConformanceVersion const &,
               ShaderFloatControlsIndependence const &,
               ShaderFloatControlsIndependence const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               uint32_t const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               ResolveModeFlags const &,
               ResolveModeFlags const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               uint64_t const &,
               SampleCountFlags const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       driverID,
                       driverName,
                       driverInfo,
                       conformanceVersion,
                       denormBehaviorIndependence,
                       roundingModeIndependence,
                       shaderSignedZeroInfNanPreserveFloat16,
                       shaderSignedZeroInfNanPreserveFloat32,
                       shaderSignedZeroInfNanPreserveFloat64,
                       shaderDenormPreserveFloat16,
                       shaderDenormPreserveFloat32,
                       shaderDenormPreserveFloat64,
                       shaderDenormFlushToZeroFloat16,
                       shaderDenormFlushToZeroFloat32,
                       shaderDenormFlushToZeroFloat64,
                       shaderRoundingModeRTEFloat16,
                       shaderRoundingModeRTEFloat32,
                       shaderRoundingModeRTEFloat64,
                       shaderRoundingModeRTZFloat16,
                       shaderRoundingModeRTZFloat32,
                       shaderRoundingModeRTZFloat64,
                       maxUpdateAfterBindDescriptorsInAllPools,
                       shaderUniformBufferArrayNonUniformIndexingNative,
                       shaderSampledImageArrayNonUniformIndexingNative,
                       shaderStorageBufferArrayNonUniformIndexingNative,
                       shaderStorageImageArrayNonUniformIndexingNative,
                       shaderInputAttachmentArrayNonUniformIndexingNative,
                       robustBufferAccessUpdateAfterBind,
                       quadDivergentImplicitLod,
                       maxPerStageDescriptorUpdateAfterBindSamplers,
                       maxPerStageDescriptorUpdateAfterBindUniformBuffers,
                       maxPerStageDescriptorUpdateAfterBindStorageBuffers,
                       maxPerStageDescriptorUpdateAfterBindSampledImages,
                       maxPerStageDescriptorUpdateAfterBindStorageImages,
                       maxPerStageDescriptorUpdateAfterBindInputAttachments,
                       maxPerStageUpdateAfterBindResources,
                       maxDescriptorSetUpdateAfterBindSamplers,
                       maxDescriptorSetUpdateAfterBindUniformBuffers,
                       maxDescriptorSetUpdateAfterBindUniformBuffersDynamic,
                       maxDescriptorSetUpdateAfterBindStorageBuffers,
                       maxDescriptorSetUpdateAfterBindStorageBuffersDynamic,
                       maxDescriptorSetUpdateAfterBindSampledImages,
                       maxDescriptorSetUpdateAfterBindStorageImages,
                       maxDescriptorSetUpdateAfterBindInputAttachments,
                       supportedDepthResolveModes,
                       supportedStencilResolveModes,
                       independentResolveNone,
                       independentResolve,
                       filterMinmaxSingleComponentFormats,
                       filterMinmaxImageComponentMapping,
                       maxTimelineSemaphoreValueDifference,
                       framebufferIntegerColorSampleCounts );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Lexicographic three-way comparison over all members in declaration order.
    // The char arrays driverName/driverInfo are compared as C strings via strcmp
    // (not byte-wise over the full fixed-size buffer), matching operator== below.
    std::strong_ordering operator<=>( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = driverID <=> rhs.driverID; cmp != 0 )
        return cmp;
      if ( auto cmp = strcmp( driverName, rhs.driverName ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = strcmp( driverInfo, rhs.driverInfo ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = conformanceVersion <=> rhs.conformanceVersion; cmp != 0 )
        return cmp;
      if ( auto cmp = denormBehaviorIndependence <=> rhs.denormBehaviorIndependence; cmp != 0 )
        return cmp;
      if ( auto cmp = roundingModeIndependence <=> rhs.roundingModeIndependence; cmp != 0 )
        return cmp;
      if ( auto cmp = shaderSignedZeroInfNanPreserveFloat16 <=> rhs.shaderSignedZeroInfNanPreserveFloat16; cmp != 0 )
        return cmp;
      if ( auto cmp = shaderSignedZeroInfNanPreserveFloat32 <=> rhs.shaderSignedZeroInfNanPreserveFloat32; cmp != 0 )
        return cmp;
      if ( auto cmp = shaderSignedZeroInfNanPreserveFloat64 <=> rhs.shaderSignedZeroInfNanPreserveFloat64; cmp != 0 )
        return cmp;
      if ( auto cmp = shaderDenormPreserveFloat16 <=> rhs.shaderDenormPreserveFloat16; cmp != 0 )
        return cmp;
      if ( auto cmp = shaderDenormPreserveFloat32 <=> rhs.shaderDenormPreserveFloat32; cmp != 0 )
        return cmp;
      if ( auto cmp = shaderDenormPreserveFloat64 <=> rhs.shaderDenormPreserveFloat64; cmp != 0 )
        return cmp;
      if ( auto cmp = shaderDenormFlushToZeroFloat16 <=> rhs.shaderDenormFlushToZeroFloat16; cmp != 0 )
        return cmp;
      if ( auto cmp = shaderDenormFlushToZeroFloat32 <=> rhs.shaderDenormFlushToZeroFloat32; cmp != 0 )
        return cmp;
      if ( auto cmp = shaderDenormFlushToZeroFloat64 <=> rhs.shaderDenormFlushToZeroFloat64; cmp != 0 )
        return cmp;
      if ( auto cmp = shaderRoundingModeRTEFloat16 <=> rhs.shaderRoundingModeRTEFloat16; cmp != 0 )
        return cmp;
      if ( auto cmp = shaderRoundingModeRTEFloat32 <=> rhs.shaderRoundingModeRTEFloat32; cmp != 0 )
        return cmp;
      if ( auto cmp = shaderRoundingModeRTEFloat64 <=> rhs.shaderRoundingModeRTEFloat64; cmp != 0 )
        return cmp;
      if ( auto cmp = shaderRoundingModeRTZFloat16 <=> rhs.shaderRoundingModeRTZFloat16; cmp != 0 )
        return cmp;
      if ( auto cmp = shaderRoundingModeRTZFloat32 <=> rhs.shaderRoundingModeRTZFloat32; cmp != 0 )
        return cmp;
      if ( auto cmp = shaderRoundingModeRTZFloat64 <=> rhs.shaderRoundingModeRTZFloat64; cmp != 0 )
        return cmp;
      if ( auto cmp = maxUpdateAfterBindDescriptorsInAllPools <=> rhs.maxUpdateAfterBindDescriptorsInAllPools; cmp != 0 )
        return cmp;
      if ( auto cmp = shaderUniformBufferArrayNonUniformIndexingNative <=> rhs.shaderUniformBufferArrayNonUniformIndexingNative; cmp != 0 )
        return cmp;
      if ( auto cmp = shaderSampledImageArrayNonUniformIndexingNative <=> rhs.shaderSampledImageArrayNonUniformIndexingNative; cmp != 0 )
        return cmp;
      if ( auto cmp = shaderStorageBufferArrayNonUniformIndexingNative <=> rhs.shaderStorageBufferArrayNonUniformIndexingNative; cmp != 0 )
        return cmp;
      if ( auto cmp = shaderStorageImageArrayNonUniformIndexingNative <=> rhs.shaderStorageImageArrayNonUniformIndexingNative; cmp != 0 )
        return cmp;
      if ( auto cmp = shaderInputAttachmentArrayNonUniformIndexingNative <=> rhs.shaderInputAttachmentArrayNonUniformIndexingNative; cmp != 0 )
        return cmp;
      if ( auto cmp = robustBufferAccessUpdateAfterBind <=> rhs.robustBufferAccessUpdateAfterBind; cmp != 0 )
        return cmp;
      if ( auto cmp = quadDivergentImplicitLod <=> rhs.quadDivergentImplicitLod; cmp != 0 )
        return cmp;
      if ( auto cmp = maxPerStageDescriptorUpdateAfterBindSamplers <=> rhs.maxPerStageDescriptorUpdateAfterBindSamplers; cmp != 0 )
        return cmp;
      if ( auto cmp = maxPerStageDescriptorUpdateAfterBindUniformBuffers <=> rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers; cmp != 0 )
        return cmp;
      if ( auto cmp = maxPerStageDescriptorUpdateAfterBindStorageBuffers <=> rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers; cmp != 0 )
        return cmp;
      if ( auto cmp = maxPerStageDescriptorUpdateAfterBindSampledImages <=> rhs.maxPerStageDescriptorUpdateAfterBindSampledImages; cmp != 0 )
        return cmp;
      if ( auto cmp = maxPerStageDescriptorUpdateAfterBindStorageImages <=> rhs.maxPerStageDescriptorUpdateAfterBindStorageImages; cmp != 0 )
        return cmp;
      if ( auto cmp = maxPerStageDescriptorUpdateAfterBindInputAttachments <=> rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments; cmp != 0 )
        return cmp;
      if ( auto cmp = maxPerStageUpdateAfterBindResources <=> rhs.maxPerStageUpdateAfterBindResources; cmp != 0 )
        return cmp;
      if ( auto cmp = maxDescriptorSetUpdateAfterBindSamplers <=> rhs.maxDescriptorSetUpdateAfterBindSamplers; cmp != 0 )
        return cmp;
      if ( auto cmp = maxDescriptorSetUpdateAfterBindUniformBuffers <=> rhs.maxDescriptorSetUpdateAfterBindUniformBuffers; cmp != 0 )
        return cmp;
      if ( auto cmp = maxDescriptorSetUpdateAfterBindUniformBuffersDynamic <=> rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic; cmp != 0 )
        return cmp;
      if ( auto cmp = maxDescriptorSetUpdateAfterBindStorageBuffers <=> rhs.maxDescriptorSetUpdateAfterBindStorageBuffers; cmp != 0 )
        return cmp;
      if ( auto cmp = maxDescriptorSetUpdateAfterBindStorageBuffersDynamic <=> rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic; cmp != 0 )
        return cmp;
      if ( auto cmp = maxDescriptorSetUpdateAfterBindSampledImages <=> rhs.maxDescriptorSetUpdateAfterBindSampledImages; cmp != 0 )
        return cmp;
      if ( auto cmp = maxDescriptorSetUpdateAfterBindStorageImages <=> rhs.maxDescriptorSetUpdateAfterBindStorageImages; cmp != 0 )
        return cmp;
      if ( auto cmp = maxDescriptorSetUpdateAfterBindInputAttachments <=> rhs.maxDescriptorSetUpdateAfterBindInputAttachments; cmp != 0 )
        return cmp;
      if ( auto cmp = supportedDepthResolveModes <=> rhs.supportedDepthResolveModes; cmp != 0 )
        return cmp;
      if ( auto cmp = supportedStencilResolveModes <=> rhs.supportedStencilResolveModes; cmp != 0 )
        return cmp;
      if ( auto cmp = independentResolveNone <=> rhs.independentResolveNone; cmp != 0 )
        return cmp;
      if ( auto cmp = independentResolve <=> rhs.independentResolve; cmp != 0 )
        return cmp;
      if ( auto cmp = filterMinmaxSingleComponentFormats <=> rhs.filterMinmaxSingleComponentFormats; cmp != 0 )
        return cmp;
      if ( auto cmp = filterMinmaxImageComponentMapping <=> rhs.filterMinmaxImageComponentMapping; cmp != 0 )
        return cmp;
      if ( auto cmp = maxTimelineSemaphoreValueDifference <=> rhs.maxTimelineSemaphoreValueDifference; cmp != 0 )
        return cmp;
      if ( auto cmp = framebufferIntegerColorSampleCounts <=> rhs.framebufferIntegerColorSampleCounts; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    // Memberwise equality over all members, including sType and pNext (pointer identity).
    // driverName/driverInfo are compared as C strings via strcmp, consistent with operator<=>.
    bool operator==( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( strcmp( driverName, rhs.driverName ) == 0 ) &&
             ( strcmp( driverInfo, rhs.driverInfo ) == 0 ) && ( conformanceVersion == rhs.conformanceVersion ) &&
             ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) && ( roundingModeIndependence == rhs.roundingModeIndependence ) &&
             ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) &&
             ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) &&
             ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) &&
             ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) &&
             ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) &&
             ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) &&
             ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) &&
             ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) &&
             ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) &&
             ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 ) &&
             ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) &&
             ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative ) &&
             ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative ) &&
             ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative ) &&
             ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative ) &&
             ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) &&
             ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) &&
             ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) &&
             ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) &&
             ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) &&
             ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) &&
             ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) &&
             ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) &&
             ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) &&
             ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) &&
             ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) &&
             ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) &&
             ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) &&
             ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) &&
             ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) &&
             ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) &&
             ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments ) &&
             ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) && ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) &&
             ( independentResolveNone == rhs.independentResolveNone ) && ( independentResolve == rhs.independentResolve ) &&
             ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) &&
             ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ) &&
             ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference ) &&
             ( framebufferIntegerColorSampleCounts == rhs.framebufferIntegerColorSampleCounts );
    }

    // Inequality: defined as the logical negation of memberwise equality.
    bool operator!=( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }

  public:
    StructureType                                 sType                                                = StructureType::ePhysicalDeviceVulkan12Properties;
    void *                                        pNext                                                = {};
    DriverId                                      driverID                                             = DriverId::eAmdProprietary;
    ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName                                           = {};
    ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo                                           = {};
    ConformanceVersion                            conformanceVersion                                   = {};
    ShaderFloatControlsIndependence               denormBehaviorIndependence                           = ShaderFloatControlsIndependence::e32BitOnly;
    ShaderFloatControlsIndependence               roundingModeIndependence                             = ShaderFloatControlsIndependence::e32BitOnly;
    Bool32                                        shaderSignedZeroInfNanPreserveFloat16                = {};
    Bool32                                        shaderSignedZeroInfNanPreserveFloat32                = {};
    Bool32                                        shaderSignedZeroInfNanPreserveFloat64                = {};
    Bool32                                        shaderDenormPreserveFloat16                          = {};
    Bool32                                        shaderDenormPreserveFloat32                          = {};
    Bool32                                        shaderDenormPreserveFloat64                          = {};
    Bool32                                        shaderDenormFlushToZeroFloat16                       = {};
    Bool32                                        shaderDenormFlushToZeroFloat32                       = {};
    Bool32                                        shaderDenormFlushToZeroFloat64                       = {};
    Bool32                                        shaderRoundingModeRTEFloat16                         = {};
    Bool32                                        shaderRoundingModeRTEFloat32                         = {};
    Bool32                                        shaderRoundingModeRTEFloat64                         = {};
    Bool32                                        shaderRoundingModeRTZFloat16                         = {};
    Bool32                                        shaderRoundingModeRTZFloat32                         = {};
    Bool32                                        shaderRoundingModeRTZFloat64                         = {};
    uint32_t                                      maxUpdateAfterBindDescriptorsInAllPools              = {};
    Bool32                                        shaderUniformBufferArrayNonUniformIndexingNative     = {};
    Bool32                                        shaderSampledImageArrayNonUniformIndexingNative      = {};
    Bool32                                        shaderStorageBufferArrayNonUniformIndexingNative     = {};
    Bool32                                        shaderStorageImageArrayNonUniformIndexingNative      = {};
    Bool32                                        shaderInputAttachmentArrayNonUniformIndexingNative   = {};
    Bool32                                        robustBufferAccessUpdateAfterBind                    = {};
    Bool32                                        quadDivergentImplicitLod                             = {};
    uint32_t                                      maxPerStageDescriptorUpdateAfterBindSamplers         = {};
    uint32_t                                      maxPerStageDescriptorUpdateAfterBindUniformBuffers   = {};
    uint32_t                                      maxPerStageDescriptorUpdateAfterBindStorageBuffers   = {};
    uint32_t                                      maxPerStageDescriptorUpdateAfterBindSampledImages    = {};
    uint32_t                                      maxPerStageDescriptorUpdateAfterBindStorageImages    = {};
    uint32_t                                      maxPerStageDescriptorUpdateAfterBindInputAttachments = {};
    uint32_t                                      maxPerStageUpdateAfterBindResources                  = {};
    uint32_t                                      maxDescriptorSetUpdateAfterBindSamplers              = {};
    uint32_t                                      maxDescriptorSetUpdateAfterBindUniformBuffers        = {};
    uint32_t                                      maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {};
    uint32_t                                      maxDescriptorSetUpdateAfterBindStorageBuffers        = {};
    uint32_t                                      maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {};
    uint32_t                                      maxDescriptorSetUpdateAfterBindSampledImages         = {};
    uint32_t                                      maxDescriptorSetUpdateAfterBindStorageImages         = {};
    uint32_t                                      maxDescriptorSetUpdateAfterBindInputAttachments      = {};
    ResolveModeFlags                              supportedDepthResolveModes                           = {};
    ResolveModeFlags                              supportedStencilResolveModes                         = {};
    Bool32                                        independentResolveNone                               = {};
    Bool32                                        independentResolve                                   = {};
    Bool32                                        filterMinmaxSingleComponentFormats                   = {};
    Bool32                                        filterMinmaxImageComponentMapping                    = {};
    uint64_t                                      maxTimelineSemaphoreValueDifference                  = {};
    SampleCountFlags                              framebufferIntegerColorSampleCounts                  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization: maps the native C struct VkPhysicalDeviceVulkan12Properties
  // to its C++ wrapper type (only emitted for C++20 and newer).
  template <>
  struct CppType<VkPhysicalDeviceVulkan12Properties>
  {
    using Type = VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Properties>
  {
    using Type = PhysicalDeviceVulkan12Properties;
  };

  // wrapper struct for struct VkPhysicalDeviceVulkan13Features, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkan13Features.html
  struct PhysicalDeviceVulkan13Features
  {
    // Layout-compatible C++ wrapper around the native C struct; all conversions below
    // rely on this struct having exactly the same member layout as VkPhysicalDeviceVulkan13Features.
    using NativeType = VkPhysicalDeviceVulkan13Features;

    // allowDuplicate / structureType are consulted by the StructureChain machinery:
    // this struct may appear at most once in a pNext chain, tagged with the sType below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVulkan13Features;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: every feature flag defaults to VK_FALSE ({}), pNext to nullptr,
    // and sType is fixed by the member initializer below — callers never set it.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Features( Bool32 robustImageAccess_                                  = {},
                                                         Bool32 inlineUniformBlock_                                 = {},
                                                         Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {},
                                                         Bool32 pipelineCreationCacheControl_                       = {},
                                                         Bool32 privateData_                                        = {},
                                                         Bool32 shaderDemoteToHelperInvocation_                     = {},
                                                         Bool32 shaderTerminateInvocation_                          = {},
                                                         Bool32 subgroupSizeControl_                                = {},
                                                         Bool32 computeFullSubgroups_                               = {},
                                                         Bool32 synchronization2_                                   = {},
                                                         Bool32 textureCompressionASTC_HDR_                         = {},
                                                         Bool32 shaderZeroInitializeWorkgroupMemory_                = {},
                                                         Bool32 dynamicRendering_                                   = {},
                                                         Bool32 shaderIntegerDotProduct_                            = {},
                                                         Bool32 maintenance4_                                       = {},
                                                         void * pNext_                                              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , robustImageAccess{ robustImageAccess_ }
      , inlineUniformBlock{ inlineUniformBlock_ }
      , descriptorBindingInlineUniformBlockUpdateAfterBind{ descriptorBindingInlineUniformBlockUpdateAfterBind_ }
      , pipelineCreationCacheControl{ pipelineCreationCacheControl_ }
      , privateData{ privateData_ }
      , shaderDemoteToHelperInvocation{ shaderDemoteToHelperInvocation_ }
      , shaderTerminateInvocation{ shaderTerminateInvocation_ }
      , subgroupSizeControl{ subgroupSizeControl_ }
      , computeFullSubgroups{ computeFullSubgroups_ }
      , synchronization2{ synchronization2_ }
      , textureCompressionASTC_HDR{ textureCompressionASTC_HDR_ }
      , shaderZeroInitializeWorkgroupMemory{ shaderZeroInitializeWorkgroupMemory_ }
      , dynamicRendering{ dynamicRendering_ }
      , shaderIntegerDotProduct{ shaderIntegerDotProduct_ }
      , maintenance4{ maintenance4_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Features( PhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct: valid only because the two types are
    // layout-compatible (see NativeType above), so the reinterpret_cast is a no-op view.
    PhysicalDeviceVulkan13Features( VkPhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVulkan13Features( *reinterpret_cast<PhysicalDeviceVulkan13Features const *>( &rhs ) )
    {
    }

    PhysicalDeviceVulkan13Features & operator=( PhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the same layout-compatibility guarantee.
    PhysicalDeviceVulkan13Features & operator=( VkPhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceVulkan13Features const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setRobustImageAccess( Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      robustImageAccess = robustImageAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setInlineUniformBlock( Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT
    {
      inlineUniformBlock = inlineUniformBlock_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
      setDescriptorBindingInlineUniformBlockUpdateAfterBind( Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setPipelineCreationCacheControl( Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineCreationCacheControl = pipelineCreationCacheControl_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setPrivateData( Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT
    {
      privateData = privateData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setShaderDemoteToHelperInvocation( Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setShaderTerminateInvocation( Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderTerminateInvocation = shaderTerminateInvocation_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setSubgroupSizeControl( Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
    {
      subgroupSizeControl = subgroupSizeControl_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setComputeFullSubgroups( Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
    {
      computeFullSubgroups = computeFullSubgroups_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setSynchronization2( Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT
    {
      synchronization2 = synchronization2_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setTextureCompressionASTC_HDR( Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT
    {
      textureCompressionASTC_HDR = textureCompressionASTC_HDR_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
      setShaderZeroInitializeWorkgroupMemory( Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setDynamicRendering( Bool32 dynamicRendering_ ) VULKAN_HPP_NOEXCEPT
    {
      dynamicRendering = dynamicRendering_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setShaderIntegerDotProduct( Bool32 shaderIntegerDotProduct_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderIntegerDotProduct = shaderIntegerDotProduct_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setMaintenance4( Bool32 maintenance4_ ) VULKAN_HPP_NOEXCEPT
    {
      maintenance4 = maintenance4_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C struct (ref and pointer, const and non-const),
    // so this wrapper can be passed directly to the C API.
    operator VkPhysicalDeviceVulkan13Features const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVulkan13Features *>( this );
    }

    operator VkPhysicalDeviceVulkan13Features &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVulkan13Features *>( this );
    }

    operator VkPhysicalDeviceVulkan13Features const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceVulkan13Features *>( this );
    }

    operator VkPhysicalDeviceVulkan13Features *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVulkan13Features *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order; used below to
    // implement comparison generically. The tuple order must match the member order exactly.
    std::tuple<StructureType const &,
               void * const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       robustImageAccess,
                       inlineUniformBlock,
                       descriptorBindingInlineUniformBlockUpdateAfterBind,
                       pipelineCreationCacheControl,
                       privateData,
                       shaderDemoteToHelperInvocation,
                       shaderTerminateInvocation,
                       subgroupSizeControl,
                       computeFullSubgroups,
                       synchronization2,
                       textureCompressionASTC_HDR,
                       shaderZeroInitializeWorkgroupMemory,
                       dynamicRendering,
                       shaderIntegerDotProduct,
                       maintenance4 );
    }
#endif

    // Comparison: defaulted <=> when available, otherwise memberwise == / != .
    // Note: pNext is compared by pointer value, not by chain contents.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVulkan13Features const & ) const = default;
#else
    bool operator==( PhysicalDeviceVulkan13Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustImageAccess == rhs.robustImageAccess ) &&
             ( inlineUniformBlock == rhs.inlineUniformBlock ) &&
             ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind ) &&
             ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl ) && ( privateData == rhs.privateData ) &&
             ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation ) && ( shaderTerminateInvocation == rhs.shaderTerminateInvocation ) &&
             ( subgroupSizeControl == rhs.subgroupSizeControl ) && ( computeFullSubgroups == rhs.computeFullSubgroups ) &&
             ( synchronization2 == rhs.synchronization2 ) && ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR ) &&
             ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory ) && ( dynamicRendering == rhs.dynamicRendering ) &&
             ( shaderIntegerDotProduct == rhs.shaderIntegerDotProduct ) && ( maintenance4 == rhs.maintenance4 );
#  endif
    }

    bool operator!=( PhysicalDeviceVulkan13Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceVulkan13Features field-for-field, in the same order;
    // do not reorder — the reinterpret_cast conversions above depend on identical layout.
    StructureType sType                                              = StructureType::ePhysicalDeviceVulkan13Features;
    void *        pNext                                              = {};
    Bool32        robustImageAccess                                  = {};
    Bool32        inlineUniformBlock                                 = {};
    Bool32        descriptorBindingInlineUniformBlockUpdateAfterBind = {};
    Bool32        pipelineCreationCacheControl                       = {};
    Bool32        privateData                                        = {};
    Bool32        shaderDemoteToHelperInvocation                     = {};
    Bool32        shaderTerminateInvocation                          = {};
    Bool32        subgroupSizeControl                                = {};
    Bool32        computeFullSubgroups                               = {};
    Bool32        synchronization2                                   = {};
    Bool32        textureCompressionASTC_HDR                         = {};
    Bool32        shaderZeroInitializeWorkgroupMemory                = {};
    Bool32        dynamicRendering                                   = {};
    Bool32        shaderIntegerDotProduct                            = {};
    Bool32        maintenance4                                       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization: maps the native C struct VkPhysicalDeviceVulkan13Features
  // to its C++ wrapper type (only emitted for C++20 and newer).
  template <>
  struct CppType<VkPhysicalDeviceVulkan13Features>
  {
    using Type = VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan13Features>
  {
    using Type = PhysicalDeviceVulkan13Features;
  };

  // wrapper struct for struct VkPhysicalDeviceVulkan13Properties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkan13Properties.html
  struct PhysicalDeviceVulkan13Properties
  {
    using NativeType = VkPhysicalDeviceVulkan13Properties;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVulkan13Properties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Properties( uint32_t         minSubgroupSize_                                                               = {},
                                                           uint32_t         maxSubgroupSize_                                                               = {},
                                                           uint32_t         maxComputeWorkgroupSubgroups_                                                  = {},
                                                           ShaderStageFlags requiredSubgroupSizeStages_                                                    = {},
                                                           uint32_t         maxInlineUniformBlockSize_                                                     = {},
                                                           uint32_t         maxPerStageDescriptorInlineUniformBlocks_                                      = {},
                                                           uint32_t         maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_                       = {},
                                                           uint32_t         maxDescriptorSetInlineUniformBlocks_                                           = {},
                                                           uint32_t         maxDescriptorSetUpdateAfterBindInlineUniformBlocks_                            = {},
                                                           uint32_t         maxInlineUniformTotalSize_                                                     = {},
                                                           Bool32           integerDotProduct8BitUnsignedAccelerated_                                      = {},
                                                           Bool32           integerDotProduct8BitSignedAccelerated_                                        = {},
                                                           Bool32           integerDotProduct8BitMixedSignednessAccelerated_                               = {},
                                                           Bool32           integerDotProduct4x8BitPackedUnsignedAccelerated_                              = {},
                                                           Bool32           integerDotProduct4x8BitPackedSignedAccelerated_                                = {},
                                                           Bool32           integerDotProduct4x8BitPackedMixedSignednessAccelerated_                       = {},
                                                           Bool32           integerDotProduct16BitUnsignedAccelerated_                                     = {},
                                                           Bool32           integerDotProduct16BitSignedAccelerated_                                       = {},
                                                           Bool32           integerDotProduct16BitMixedSignednessAccelerated_                              = {},
                                                           Bool32           integerDotProduct32BitUnsignedAccelerated_                                     = {},
                                                           Bool32           integerDotProduct32BitSignedAccelerated_                                       = {},
                                                           Bool32           integerDotProduct32BitMixedSignednessAccelerated_                              = {},
                                                           Bool32           integerDotProduct64BitUnsignedAccelerated_                                     = {},
                                                           Bool32           integerDotProduct64BitSignedAccelerated_                                       = {},
                                                           Bool32           integerDotProduct64BitMixedSignednessAccelerated_                              = {},
                                                           Bool32           integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_                = {},
                                                           Bool32           integerDotProductAccumulatingSaturating8BitSignedAccelerated_                  = {},
                                                           Bool32           integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_         = {},
                                                           Bool32           integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_        = {},
                                                           Bool32           integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_          = {},
                                                           Bool32           integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ = {},
                                                           Bool32           integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_               = {},
                                                           Bool32           integerDotProductAccumulatingSaturating16BitSignedAccelerated_                 = {},
                                                           Bool32           integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_        = {},
                                                           Bool32           integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_               = {},
                                                           Bool32           integerDotProductAccumulatingSaturating32BitSignedAccelerated_                 = {},
                                                           Bool32           integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_        = {},
                                                           Bool32           integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_               = {},
                                                           Bool32           integerDotProductAccumulatingSaturating64BitSignedAccelerated_                 = {},
                                                           Bool32           integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_        = {},
                                                           DeviceSize       storageTexelBufferOffsetAlignmentBytes_                                        = {},
                                                           Bool32           storageTexelBufferOffsetSingleTexelAlignment_                                  = {},
                                                           DeviceSize       uniformTexelBufferOffsetAlignmentBytes_                                        = {},
                                                           Bool32           uniformTexelBufferOffsetSingleTexelAlignment_                                  = {},
                                                           DeviceSize       maxBufferSize_                                                                 = {},
                                                           void *           pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , minSubgroupSize{ minSubgroupSize_ }
      , maxSubgroupSize{ maxSubgroupSize_ }
      , maxComputeWorkgroupSubgroups{ maxComputeWorkgroupSubgroups_ }
      , requiredSubgroupSizeStages{ requiredSubgroupSizeStages_ }
      , maxInlineUniformBlockSize{ maxInlineUniformBlockSize_ }
      , maxPerStageDescriptorInlineUniformBlocks{ maxPerStageDescriptorInlineUniformBlocks_ }
      , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks{ maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ }
      , maxDescriptorSetInlineUniformBlocks{ maxDescriptorSetInlineUniformBlocks_ }
      , maxDescriptorSetUpdateAfterBindInlineUniformBlocks{ maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ }
      , maxInlineUniformTotalSize{ maxInlineUniformTotalSize_ }
      , integerDotProduct8BitUnsignedAccelerated{ integerDotProduct8BitUnsignedAccelerated_ }
      , integerDotProduct8BitSignedAccelerated{ integerDotProduct8BitSignedAccelerated_ }
      , integerDotProduct8BitMixedSignednessAccelerated{ integerDotProduct8BitMixedSignednessAccelerated_ }
      , integerDotProduct4x8BitPackedUnsignedAccelerated{ integerDotProduct4x8BitPackedUnsignedAccelerated_ }
      , integerDotProduct4x8BitPackedSignedAccelerated{ integerDotProduct4x8BitPackedSignedAccelerated_ }
      , integerDotProduct4x8BitPackedMixedSignednessAccelerated{ integerDotProduct4x8BitPackedMixedSignednessAccelerated_ }
      , integerDotProduct16BitUnsignedAccelerated{ integerDotProduct16BitUnsignedAccelerated_ }
      , integerDotProduct16BitSignedAccelerated{ integerDotProduct16BitSignedAccelerated_ }
      , integerDotProduct16BitMixedSignednessAccelerated{ integerDotProduct16BitMixedSignednessAccelerated_ }
      , integerDotProduct32BitUnsignedAccelerated{ integerDotProduct32BitUnsignedAccelerated_ }
      , integerDotProduct32BitSignedAccelerated{ integerDotProduct32BitSignedAccelerated_ }
      , integerDotProduct32BitMixedSignednessAccelerated{ integerDotProduct32BitMixedSignednessAccelerated_ }
      , integerDotProduct64BitUnsignedAccelerated{ integerDotProduct64BitUnsignedAccelerated_ }
      , integerDotProduct64BitSignedAccelerated{ integerDotProduct64BitSignedAccelerated_ }
      , integerDotProduct64BitMixedSignednessAccelerated{ integerDotProduct64BitMixedSignednessAccelerated_ }
      , integerDotProductAccumulatingSaturating8BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ }
      , integerDotProductAccumulatingSaturating8BitSignedAccelerated{ integerDotProductAccumulatingSaturating8BitSignedAccelerated_ }
      , integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ }
      , integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ }
      , integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ }
      , integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ }
      , integerDotProductAccumulatingSaturating16BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ }
      , integerDotProductAccumulatingSaturating16BitSignedAccelerated{ integerDotProductAccumulatingSaturating16BitSignedAccelerated_ }
      , integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ }
      , integerDotProductAccumulatingSaturating32BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ }
      , integerDotProductAccumulatingSaturating32BitSignedAccelerated{ integerDotProductAccumulatingSaturating32BitSignedAccelerated_ }
      , integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ }
      , integerDotProductAccumulatingSaturating64BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ }
      , integerDotProductAccumulatingSaturating64BitSignedAccelerated{ integerDotProductAccumulatingSaturating64BitSignedAccelerated_ }
      , integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ }
      , storageTexelBufferOffsetAlignmentBytes{ storageTexelBufferOffsetAlignmentBytes_ }
      , storageTexelBufferOffsetSingleTexelAlignment{ storageTexelBufferOffsetSingleTexelAlignment_ }
      , uniformTexelBufferOffsetAlignmentBytes{ uniformTexelBufferOffsetAlignmentBytes_ }
      , uniformTexelBufferOffsetSingleTexelAlignment{ uniformTexelBufferOffsetSingleTexelAlignment_ }
      , maxBufferSize{ maxBufferSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Properties( PhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVulkan13Properties( VkPhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVulkan13Properties( *reinterpret_cast<PhysicalDeviceVulkan13Properties const *>( &rhs ) )
    {
    }

    PhysicalDeviceVulkan13Properties & operator=( PhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct by viewing it through the
    // layout-compatible C++ wrapper type.
    PhysicalDeviceVulkan13Properties & operator=( VkPhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      auto const * wrapped = reinterpret_cast<PhysicalDeviceVulkan13Properties const *>( &rhs );
      *this                = *wrapped;
      return *this;
    }

    // Implicit conversions to the underlying C struct, by reference and by
    // pointer (const and non-const).  These let the wrapper be passed directly
    // to the C API; they rely on the wrapper being layout-compatible with
    // VkPhysicalDeviceVulkan13Properties.
    operator VkPhysicalDeviceVulkan13Properties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVulkan13Properties *>( this );
    }

    operator VkPhysicalDeviceVulkan13Properties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVulkan13Properties *>( this );
    }

    operator VkPhysicalDeviceVulkan13Properties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceVulkan13Properties *>( this );
    }

    operator VkPhysicalDeviceVulkan13Properties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVulkan13Properties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // reflect(): returns every member (including sType and pNext) as a tuple of
    // const references, in declaration order.  Used by operator== below when
    // reflection is enabled; the tuple order must exactly mirror the member layout.
    std::tuple<StructureType const &,
               void * const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               ShaderStageFlags const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               DeviceSize const &,
               Bool32 const &,
               DeviceSize const &,
               Bool32 const &,
               DeviceSize const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       minSubgroupSize,
                       maxSubgroupSize,
                       maxComputeWorkgroupSubgroups,
                       requiredSubgroupSizeStages,
                       maxInlineUniformBlockSize,
                       maxPerStageDescriptorInlineUniformBlocks,
                       maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks,
                       maxDescriptorSetInlineUniformBlocks,
                       maxDescriptorSetUpdateAfterBindInlineUniformBlocks,
                       maxInlineUniformTotalSize,
                       integerDotProduct8BitUnsignedAccelerated,
                       integerDotProduct8BitSignedAccelerated,
                       integerDotProduct8BitMixedSignednessAccelerated,
                       integerDotProduct4x8BitPackedUnsignedAccelerated,
                       integerDotProduct4x8BitPackedSignedAccelerated,
                       integerDotProduct4x8BitPackedMixedSignednessAccelerated,
                       integerDotProduct16BitUnsignedAccelerated,
                       integerDotProduct16BitSignedAccelerated,
                       integerDotProduct16BitMixedSignednessAccelerated,
                       integerDotProduct32BitUnsignedAccelerated,
                       integerDotProduct32BitSignedAccelerated,
                       integerDotProduct32BitMixedSignednessAccelerated,
                       integerDotProduct64BitUnsignedAccelerated,
                       integerDotProduct64BitSignedAccelerated,
                       integerDotProduct64BitMixedSignednessAccelerated,
                       integerDotProductAccumulatingSaturating8BitUnsignedAccelerated,
                       integerDotProductAccumulatingSaturating8BitSignedAccelerated,
                       integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated,
                       integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated,
                       integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated,
                       integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated,
                       integerDotProductAccumulatingSaturating16BitUnsignedAccelerated,
                       integerDotProductAccumulatingSaturating16BitSignedAccelerated,
                       integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated,
                       integerDotProductAccumulatingSaturating32BitUnsignedAccelerated,
                       integerDotProductAccumulatingSaturating32BitSignedAccelerated,
                       integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated,
                       integerDotProductAccumulatingSaturating64BitUnsignedAccelerated,
                       integerDotProductAccumulatingSaturating64BitSignedAccelerated,
                       integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated,
                       storageTexelBufferOffsetAlignmentBytes,
                       storageTexelBufferOffsetSingleTexelAlignment,
                       uniformTexelBufferOffsetAlignmentBytes,
                       uniformTexelBufferOffsetSingleTexelAlignment,
                       maxBufferSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20 path: a defaulted three-way comparison generates all relational and
    // equality operators memberwise.
    auto operator<=>( PhysicalDeviceVulkan13Properties const & ) const = default;
#else
    // Pre-C++20 fallback: memberwise equality, either via the reflection tuple
    // (when VULKAN_HPP_USE_REFLECT is enabled) or via an explicit
    // member-by-member comparison in declaration order.
    bool operator==( PhysicalDeviceVulkan13Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSubgroupSize == rhs.minSubgroupSize ) && ( maxSubgroupSize == rhs.maxSubgroupSize ) &&
             ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups ) && ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages ) &&
             ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize ) &&
             ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks ) &&
             ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) &&
             ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks ) &&
             ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ) &&
             ( maxInlineUniformTotalSize == rhs.maxInlineUniformTotalSize ) &&
             ( integerDotProduct8BitUnsignedAccelerated == rhs.integerDotProduct8BitUnsignedAccelerated ) &&
             ( integerDotProduct8BitSignedAccelerated == rhs.integerDotProduct8BitSignedAccelerated ) &&
             ( integerDotProduct8BitMixedSignednessAccelerated == rhs.integerDotProduct8BitMixedSignednessAccelerated ) &&
             ( integerDotProduct4x8BitPackedUnsignedAccelerated == rhs.integerDotProduct4x8BitPackedUnsignedAccelerated ) &&
             ( integerDotProduct4x8BitPackedSignedAccelerated == rhs.integerDotProduct4x8BitPackedSignedAccelerated ) &&
             ( integerDotProduct4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated ) &&
             ( integerDotProduct16BitUnsignedAccelerated == rhs.integerDotProduct16BitUnsignedAccelerated ) &&
             ( integerDotProduct16BitSignedAccelerated == rhs.integerDotProduct16BitSignedAccelerated ) &&
             ( integerDotProduct16BitMixedSignednessAccelerated == rhs.integerDotProduct16BitMixedSignednessAccelerated ) &&
             ( integerDotProduct32BitUnsignedAccelerated == rhs.integerDotProduct32BitUnsignedAccelerated ) &&
             ( integerDotProduct32BitSignedAccelerated == rhs.integerDotProduct32BitSignedAccelerated ) &&
             ( integerDotProduct32BitMixedSignednessAccelerated == rhs.integerDotProduct32BitMixedSignednessAccelerated ) &&
             ( integerDotProduct64BitUnsignedAccelerated == rhs.integerDotProduct64BitUnsignedAccelerated ) &&
             ( integerDotProduct64BitSignedAccelerated == rhs.integerDotProduct64BitSignedAccelerated ) &&
             ( integerDotProduct64BitMixedSignednessAccelerated == rhs.integerDotProduct64BitMixedSignednessAccelerated ) &&
             ( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated ) &&
             ( integerDotProductAccumulatingSaturating8BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated ) &&
             ( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ==
               rhs.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ) &&
             ( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ==
               rhs.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ) &&
             ( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ==
               rhs.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ) &&
             ( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ==
               rhs.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ) &&
             ( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated ) &&
             ( integerDotProductAccumulatingSaturating16BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated ) &&
             ( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ==
               rhs.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ) &&
             ( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated ) &&
             ( integerDotProductAccumulatingSaturating32BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated ) &&
             ( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ==
               rhs.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ) &&
             ( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated ) &&
             ( integerDotProductAccumulatingSaturating64BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated ) &&
             ( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ==
               rhs.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ) &&
             ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes ) &&
             ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment ) &&
             ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes ) &&
             ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment ) && ( maxBufferSize == rhs.maxBufferSize );
#  endif
    }

    bool operator!=( PhysicalDeviceVulkan13Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceVulkan13Properties in declaration order;
    // do not reorder — the reinterpret_cast conversions above depend on it.
    // Structure identity and extension chain.
    StructureType    sType                                                                         = StructureType::ePhysicalDeviceVulkan13Properties;
    void *           pNext                                                                         = {};
    // Subgroup size limits.
    uint32_t         minSubgroupSize                                                               = {};
    uint32_t         maxSubgroupSize                                                               = {};
    uint32_t         maxComputeWorkgroupSubgroups                                                  = {};
    ShaderStageFlags requiredSubgroupSizeStages                                                    = {};
    // Inline uniform block limits.
    uint32_t         maxInlineUniformBlockSize                                                     = {};
    uint32_t         maxPerStageDescriptorInlineUniformBlocks                                      = {};
    uint32_t         maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks                       = {};
    uint32_t         maxDescriptorSetInlineUniformBlocks                                           = {};
    uint32_t         maxDescriptorSetUpdateAfterBindInlineUniformBlocks                            = {};
    uint32_t         maxInlineUniformTotalSize                                                     = {};
    // Integer dot product acceleration flags; {} value-initializes to zero (false).
    Bool32           integerDotProduct8BitUnsignedAccelerated                                      = {};
    Bool32           integerDotProduct8BitSignedAccelerated                                        = {};
    Bool32           integerDotProduct8BitMixedSignednessAccelerated                               = {};
    Bool32           integerDotProduct4x8BitPackedUnsignedAccelerated                              = {};
    Bool32           integerDotProduct4x8BitPackedSignedAccelerated                                = {};
    Bool32           integerDotProduct4x8BitPackedMixedSignednessAccelerated                       = {};
    Bool32           integerDotProduct16BitUnsignedAccelerated                                     = {};
    Bool32           integerDotProduct16BitSignedAccelerated                                       = {};
    Bool32           integerDotProduct16BitMixedSignednessAccelerated                              = {};
    Bool32           integerDotProduct32BitUnsignedAccelerated                                     = {};
    Bool32           integerDotProduct32BitSignedAccelerated                                       = {};
    Bool32           integerDotProduct32BitMixedSignednessAccelerated                              = {};
    Bool32           integerDotProduct64BitUnsignedAccelerated                                     = {};
    Bool32           integerDotProduct64BitSignedAccelerated                                       = {};
    Bool32           integerDotProduct64BitMixedSignednessAccelerated                              = {};
    Bool32           integerDotProductAccumulatingSaturating8BitUnsignedAccelerated                = {};
    Bool32           integerDotProductAccumulatingSaturating8BitSignedAccelerated                  = {};
    Bool32           integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated         = {};
    Bool32           integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated        = {};
    Bool32           integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated          = {};
    Bool32           integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {};
    Bool32           integerDotProductAccumulatingSaturating16BitUnsignedAccelerated               = {};
    Bool32           integerDotProductAccumulatingSaturating16BitSignedAccelerated                 = {};
    Bool32           integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated        = {};
    Bool32           integerDotProductAccumulatingSaturating32BitUnsignedAccelerated               = {};
    Bool32           integerDotProductAccumulatingSaturating32BitSignedAccelerated                 = {};
    Bool32           integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated        = {};
    Bool32           integerDotProductAccumulatingSaturating64BitUnsignedAccelerated               = {};
    Bool32           integerDotProductAccumulatingSaturating64BitSignedAccelerated                 = {};
    Bool32           integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated        = {};
    // Texel buffer alignment properties.
    DeviceSize       storageTexelBufferOffsetAlignmentBytes                                        = {};
    Bool32           storageTexelBufferOffsetSingleTexelAlignment                                  = {};
    DeviceSize       uniformTexelBufferOffsetAlignmentBytes                                        = {};
    Bool32           uniformTexelBufferOffsetSingleTexelAlignment                                  = {};
    // Maximum size of a single buffer.
    DeviceSize       maxBufferSize                                                                 = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceVulkan13Properties>
  {
    using Type = PhysicalDeviceVulkan13Properties;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan13Properties>
  {
    using Type = PhysicalDeviceVulkan13Properties;
  };

  // wrapper struct for struct VkPhysicalDeviceVulkan14Features, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkan14Features.html
  struct PhysicalDeviceVulkan14Features
  {
    using NativeType = VkPhysicalDeviceVulkan14Features;

    // Compile-time metadata: the matching StructureType enumerant, and whether
    // this struct may appear more than once in a structure chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVulkan14Features;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: every feature flag defaults to zero ({} on Bool32),
    // pNext defaults to nullptr; sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan14Features( Bool32 globalPriorityQuery_                    = {},
                                                         Bool32 shaderSubgroupRotate_                   = {},
                                                         Bool32 shaderSubgroupRotateClustered_          = {},
                                                         Bool32 shaderFloatControls2_                   = {},
                                                         Bool32 shaderExpectAssume_                     = {},
                                                         Bool32 rectangularLines_                       = {},
                                                         Bool32 bresenhamLines_                         = {},
                                                         Bool32 smoothLines_                            = {},
                                                         Bool32 stippledRectangularLines_               = {},
                                                         Bool32 stippledBresenhamLines_                 = {},
                                                         Bool32 stippledSmoothLines_                    = {},
                                                         Bool32 vertexAttributeInstanceRateDivisor_     = {},
                                                         Bool32 vertexAttributeInstanceRateZeroDivisor_ = {},
                                                         Bool32 indexTypeUint8_                         = {},
                                                         Bool32 dynamicRenderingLocalRead_              = {},
                                                         Bool32 maintenance5_                           = {},
                                                         Bool32 maintenance6_                           = {},
                                                         Bool32 pipelineProtectedAccess_                = {},
                                                         Bool32 pipelineRobustness_                     = {},
                                                         Bool32 hostImageCopy_                          = {},
                                                         Bool32 pushDescriptor_                         = {},
                                                         void * pNext_                                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , globalPriorityQuery{ globalPriorityQuery_ }
      , shaderSubgroupRotate{ shaderSubgroupRotate_ }
      , shaderSubgroupRotateClustered{ shaderSubgroupRotateClustered_ }
      , shaderFloatControls2{ shaderFloatControls2_ }
      , shaderExpectAssume{ shaderExpectAssume_ }
      , rectangularLines{ rectangularLines_ }
      , bresenhamLines{ bresenhamLines_ }
      , smoothLines{ smoothLines_ }
      , stippledRectangularLines{ stippledRectangularLines_ }
      , stippledBresenhamLines{ stippledBresenhamLines_ }
      , stippledSmoothLines{ stippledSmoothLines_ }
      , vertexAttributeInstanceRateDivisor{ vertexAttributeInstanceRateDivisor_ }
      , vertexAttributeInstanceRateZeroDivisor{ vertexAttributeInstanceRateZeroDivisor_ }
      , indexTypeUint8{ indexTypeUint8_ }
      , dynamicRenderingLocalRead{ dynamicRenderingLocalRead_ }
      , maintenance5{ maintenance5_ }
      , maintenance6{ maintenance6_ }
      , pipelineProtectedAccess{ pipelineProtectedAccess_ }
      , pipelineRobustness{ pipelineRobustness_ }
      , hostImageCopy{ hostImageCopy_ }
      , pushDescriptor{ pushDescriptor_ }
    {
    }

    // Copy construction/assignment: defaulted — all members are plain values.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan14Features( PhysicalDeviceVulkan14Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; relies on layout compatibility
    // with VkPhysicalDeviceVulkan14Features.
    PhysicalDeviceVulkan14Features( VkPhysicalDeviceVulkan14Features const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVulkan14Features( *reinterpret_cast<PhysicalDeviceVulkan14Features const *>( &rhs ) )
    {
    }

    PhysicalDeviceVulkan14Features & operator=( PhysicalDeviceVulkan14Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct via the layout-compatible wrapper view.
    PhysicalDeviceVulkan14Features & operator=( VkPhysicalDeviceVulkan14Features const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceVulkan14Features const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setGlobalPriorityQuery( Bool32 globalPriorityQuery_ ) VULKAN_HPP_NOEXCEPT
    {
      globalPriorityQuery = globalPriorityQuery_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setShaderSubgroupRotate( Bool32 shaderSubgroupRotate_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSubgroupRotate = shaderSubgroupRotate_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setShaderSubgroupRotateClustered( Bool32 shaderSubgroupRotateClustered_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSubgroupRotateClustered = shaderSubgroupRotateClustered_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setShaderFloatControls2( Bool32 shaderFloatControls2_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderFloatControls2 = shaderFloatControls2_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setShaderExpectAssume( Bool32 shaderExpectAssume_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderExpectAssume = shaderExpectAssume_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setRectangularLines( Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT
    {
      rectangularLines = rectangularLines_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setBresenhamLines( Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT
    {
      bresenhamLines = bresenhamLines_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setSmoothLines( Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT
    {
      smoothLines = smoothLines_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setStippledRectangularLines( Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT
    {
      stippledRectangularLines = stippledRectangularLines_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setStippledBresenhamLines( Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT
    {
      stippledBresenhamLines = stippledBresenhamLines_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setStippledSmoothLines( Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT
    {
      stippledSmoothLines = stippledSmoothLines_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features &
      setVertexAttributeInstanceRateDivisor( Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features &
      setVertexAttributeInstanceRateZeroDivisor( Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setIndexTypeUint8( Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT
    {
      indexTypeUint8 = indexTypeUint8_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setDynamicRenderingLocalRead( Bool32 dynamicRenderingLocalRead_ ) VULKAN_HPP_NOEXCEPT
    {
      dynamicRenderingLocalRead = dynamicRenderingLocalRead_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setMaintenance5( Bool32 maintenance5_ ) VULKAN_HPP_NOEXCEPT
    {
      maintenance5 = maintenance5_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setMaintenance6( Bool32 maintenance6_ ) VULKAN_HPP_NOEXCEPT
    {
      maintenance6 = maintenance6_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setPipelineProtectedAccess( Bool32 pipelineProtectedAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineProtectedAccess = pipelineProtectedAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setPipelineRobustness( Bool32 pipelineRobustness_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineRobustness = pipelineRobustness_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setHostImageCopy( Bool32 hostImageCopy_ ) VULKAN_HPP_NOEXCEPT
    {
      hostImageCopy = hostImageCopy_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setPushDescriptor( Bool32 pushDescriptor_ ) VULKAN_HPP_NOEXCEPT
    {
      pushDescriptor = pushDescriptor_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the underlying C struct (reference and pointer,
    // const and non-const); rely on layout compatibility with the C type.
    operator VkPhysicalDeviceVulkan14Features const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVulkan14Features *>( this );
    }

    operator VkPhysicalDeviceVulkan14Features &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVulkan14Features *>( this );
    }

    operator VkPhysicalDeviceVulkan14Features const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceVulkan14Features *>( this );
    }

    operator VkPhysicalDeviceVulkan14Features *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVulkan14Features *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // reflect(): all members as a tuple of const references in declaration
    // order; used by operator== below when reflection is enabled.
    std::tuple<StructureType const &,
               void * const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       globalPriorityQuery,
                       shaderSubgroupRotate,
                       shaderSubgroupRotateClustered,
                       shaderFloatControls2,
                       shaderExpectAssume,
                       rectangularLines,
                       bresenhamLines,
                       smoothLines,
                       stippledRectangularLines,
                       stippledBresenhamLines,
                       stippledSmoothLines,
                       vertexAttributeInstanceRateDivisor,
                       vertexAttributeInstanceRateZeroDivisor,
                       indexTypeUint8,
                       dynamicRenderingLocalRead,
                       maintenance5,
                       maintenance6,
                       pipelineProtectedAccess,
                       pipelineRobustness,
                       hostImageCopy,
                       pushDescriptor );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20 path: defaulted three-way comparison generates all comparisons memberwise.
    auto operator<=>( PhysicalDeviceVulkan14Features const & ) const = default;
#else
    // Pre-C++20 fallback: memberwise equality via the reflection tuple or an
    // explicit member-by-member comparison.
    bool operator==( PhysicalDeviceVulkan14Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriorityQuery == rhs.globalPriorityQuery ) &&
             ( shaderSubgroupRotate == rhs.shaderSubgroupRotate ) && ( shaderSubgroupRotateClustered == rhs.shaderSubgroupRotateClustered ) &&
             ( shaderFloatControls2 == rhs.shaderFloatControls2 ) && ( shaderExpectAssume == rhs.shaderExpectAssume ) &&
             ( rectangularLines == rhs.rectangularLines ) && ( bresenhamLines == rhs.bresenhamLines ) && ( smoothLines == rhs.smoothLines ) &&
             ( stippledRectangularLines == rhs.stippledRectangularLines ) && ( stippledBresenhamLines == rhs.stippledBresenhamLines ) &&
             ( stippledSmoothLines == rhs.stippledSmoothLines ) && ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor ) &&
             ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor ) && ( indexTypeUint8 == rhs.indexTypeUint8 ) &&
             ( dynamicRenderingLocalRead == rhs.dynamicRenderingLocalRead ) && ( maintenance5 == rhs.maintenance5 ) && ( maintenance6 == rhs.maintenance6 ) &&
             ( pipelineProtectedAccess == rhs.pipelineProtectedAccess ) && ( pipelineRobustness == rhs.pipelineRobustness ) &&
             ( hostImageCopy == rhs.hostImageCopy ) && ( pushDescriptor == rhs.pushDescriptor );
#  endif
    }

    bool operator!=( PhysicalDeviceVulkan14Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceVulkan14Features in declaration order;
    // do not reorder — the reinterpret_cast conversions above depend on it.
    // Feature flags: {} value-initializes each Bool32 to zero (disabled).
    StructureType sType                                  = StructureType::ePhysicalDeviceVulkan14Features;
    void *        pNext                                  = {};
    Bool32        globalPriorityQuery                    = {};
    Bool32        shaderSubgroupRotate                   = {};
    Bool32        shaderSubgroupRotateClustered          = {};
    Bool32        shaderFloatControls2                   = {};
    Bool32        shaderExpectAssume                     = {};
    Bool32        rectangularLines                       = {};
    Bool32        bresenhamLines                         = {};
    Bool32        smoothLines                            = {};
    Bool32        stippledRectangularLines               = {};
    Bool32        stippledBresenhamLines                 = {};
    Bool32        stippledSmoothLines                    = {};
    Bool32        vertexAttributeInstanceRateDivisor     = {};
    Bool32        vertexAttributeInstanceRateZeroDivisor = {};
    Bool32        indexTypeUint8                         = {};
    Bool32        dynamicRenderingLocalRead              = {};
    Bool32        maintenance5                           = {};
    Bool32        maintenance6                           = {};
    Bool32        pipelineProtectedAccess                = {};
    Bool32        pipelineRobustness                     = {};
    Bool32        hostImageCopy                          = {};
    Bool32        pushDescriptor                         = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkPhysicalDeviceVulkan14Features to its C++ wrapper type
  // (trait used by generic code; only available when compiling as C++20 or newer).
  template <>
  struct CppType<VkPhysicalDeviceVulkan14Features>
  {
    using Type = PhysicalDeviceVulkan14Features;
  };
#endif

  // Maps the StructureType enumerant back to the C++ wrapper type that carries it as sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan14Features>
  {
    using Type = PhysicalDeviceVulkan14Features;
  };

  // wrapper struct for struct VkPhysicalDeviceVulkan14Properties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkan14Properties.html
  //
  // Layout-compatible mirror of the C struct: the reinterpret_cast-based conversions below rely on the
  // member order and types matching VkPhysicalDeviceVulkan14Properties exactly. Note that, unlike the
  // feature structs in this file, no setters are defined for this struct.
  struct PhysicalDeviceVulkan14Properties
  {
    using NativeType = VkPhysicalDeviceVulkan14Properties;

    // Whether this struct may appear more than once in a pNext chain, and the sType value it must carry.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVulkan14Properties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Memberwise constructor; every member is defaulted so it doubles as the default constructor.
    // optimalTilingLayoutUUID_ is taken as a std::array and copied into the ArrayWrapper1D member.
    VULKAN_HPP_CONSTEXPR_14
      PhysicalDeviceVulkan14Properties( uint32_t                         lineSubPixelPrecisionBits_                           = {},
                                        uint32_t                         maxVertexAttribDivisor_                              = {},
                                        Bool32                           supportsNonZeroFirstInstance_                        = {},
                                        uint32_t                         maxPushDescriptors_                                  = {},
                                        Bool32                           dynamicRenderingLocalReadDepthStencilAttachments_    = {},
                                        Bool32                           dynamicRenderingLocalReadMultisampledAttachments_    = {},
                                        Bool32                           earlyFragmentMultisampleCoverageAfterSampleCounting_ = {},
                                        Bool32                           earlyFragmentSampleMaskTestBeforeSampleCounting_     = {},
                                        Bool32                           depthStencilSwizzleOneSupport_                       = {},
                                        Bool32                           polygonModePointSize_                                = {},
                                        Bool32                           nonStrictSinglePixelWideLinesUseParallelogram_       = {},
                                        Bool32                           nonStrictWideLinesUseParallelogram_                  = {},
                                        Bool32                           blockTexelViewCompatibleMultipleLayers_              = {},
                                        uint32_t                         maxCombinedImageSamplerDescriptorCount_              = {},
                                        Bool32                           fragmentShadingRateClampCombinerInputs_              = {},
                                        PipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers_  = PipelineRobustnessBufferBehavior::eDeviceDefault,
                                        PipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers_  = PipelineRobustnessBufferBehavior::eDeviceDefault,
                                        PipelineRobustnessBufferBehavior defaultRobustnessVertexInputs_    = PipelineRobustnessBufferBehavior::eDeviceDefault,
                                        PipelineRobustnessImageBehavior  defaultRobustnessImages_          = PipelineRobustnessImageBehavior::eDeviceDefault,
                                        uint32_t                         copySrcLayoutCount_               = {},
                                        ImageLayout *                    pCopySrcLayouts_                  = {},
                                        uint32_t                         copyDstLayoutCount_               = {},
                                        ImageLayout *                    pCopyDstLayouts_                  = {},
                                        std::array<uint8_t, VK_UUID_SIZE> const & optimalTilingLayoutUUID_ = {},
                                        Bool32                                    identicalMemoryTypeRequirements_ = {},
                                        void *                                    pNext_                           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , lineSubPixelPrecisionBits{ lineSubPixelPrecisionBits_ }
      , maxVertexAttribDivisor{ maxVertexAttribDivisor_ }
      , supportsNonZeroFirstInstance{ supportsNonZeroFirstInstance_ }
      , maxPushDescriptors{ maxPushDescriptors_ }
      , dynamicRenderingLocalReadDepthStencilAttachments{ dynamicRenderingLocalReadDepthStencilAttachments_ }
      , dynamicRenderingLocalReadMultisampledAttachments{ dynamicRenderingLocalReadMultisampledAttachments_ }
      , earlyFragmentMultisampleCoverageAfterSampleCounting{ earlyFragmentMultisampleCoverageAfterSampleCounting_ }
      , earlyFragmentSampleMaskTestBeforeSampleCounting{ earlyFragmentSampleMaskTestBeforeSampleCounting_ }
      , depthStencilSwizzleOneSupport{ depthStencilSwizzleOneSupport_ }
      , polygonModePointSize{ polygonModePointSize_ }
      , nonStrictSinglePixelWideLinesUseParallelogram{ nonStrictSinglePixelWideLinesUseParallelogram_ }
      , nonStrictWideLinesUseParallelogram{ nonStrictWideLinesUseParallelogram_ }
      , blockTexelViewCompatibleMultipleLayers{ blockTexelViewCompatibleMultipleLayers_ }
      , maxCombinedImageSamplerDescriptorCount{ maxCombinedImageSamplerDescriptorCount_ }
      , fragmentShadingRateClampCombinerInputs{ fragmentShadingRateClampCombinerInputs_ }
      , defaultRobustnessStorageBuffers{ defaultRobustnessStorageBuffers_ }
      , defaultRobustnessUniformBuffers{ defaultRobustnessUniformBuffers_ }
      , defaultRobustnessVertexInputs{ defaultRobustnessVertexInputs_ }
      , defaultRobustnessImages{ defaultRobustnessImages_ }
      , copySrcLayoutCount{ copySrcLayoutCount_ }
      , pCopySrcLayouts{ pCopySrcLayouts_ }
      , copyDstLayoutCount{ copyDstLayoutCount_ }
      , pCopyDstLayouts{ pCopyDstLayouts_ }
      , optimalTilingLayoutUUID{ optimalTilingLayoutUUID_ }
      , identicalMemoryTypeRequirements{ identicalMemoryTypeRequirements_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Properties( PhysicalDeviceVulkan14Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpreting it as the C++ wrapper (no per-member conversion).
    PhysicalDeviceVulkan14Properties( VkPhysicalDeviceVulkan14Properties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVulkan14Properties( *reinterpret_cast<PhysicalDeviceVulkan14Properties const *>( &rhs ) )
    {
    }

    PhysicalDeviceVulkan14Properties & operator=( PhysicalDeviceVulkan14Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct, again via reinterpretation (relies on identical layout).
    PhysicalDeviceVulkan14Properties & operator=( VkPhysicalDeviceVulkan14Properties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceVulkan14Properties const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type: zero-copy reinterpretations of this very object,
    // so the wrapper can be passed directly to the C API.
    operator VkPhysicalDeviceVulkan14Properties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVulkan14Properties *>( this );
    }

    operator VkPhysicalDeviceVulkan14Properties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVulkan14Properties *>( this );
    }

    operator VkPhysicalDeviceVulkan14Properties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceVulkan14Properties *>( this );
    }

    operator VkPhysicalDeviceVulkan14Properties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVulkan14Properties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members in declaration order
    // (used e.g. by the reflection-based operator== below).
    std::tuple<StructureType const &,
               void * const &,
               uint32_t const &,
               uint32_t const &,
               Bool32 const &,
               uint32_t const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               Bool32 const &,
               uint32_t const &,
               Bool32 const &,
               PipelineRobustnessBufferBehavior const &,
               PipelineRobustnessBufferBehavior const &,
               PipelineRobustnessBufferBehavior const &,
               PipelineRobustnessImageBehavior const &,
               uint32_t const &,
               ImageLayout * const &,
               uint32_t const &,
               ImageLayout * const &,
               ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       lineSubPixelPrecisionBits,
                       maxVertexAttribDivisor,
                       supportsNonZeroFirstInstance,
                       maxPushDescriptors,
                       dynamicRenderingLocalReadDepthStencilAttachments,
                       dynamicRenderingLocalReadMultisampledAttachments,
                       earlyFragmentMultisampleCoverageAfterSampleCounting,
                       earlyFragmentSampleMaskTestBeforeSampleCounting,
                       depthStencilSwizzleOneSupport,
                       polygonModePointSize,
                       nonStrictSinglePixelWideLinesUseParallelogram,
                       nonStrictWideLinesUseParallelogram,
                       blockTexelViewCompatibleMultipleLayers,
                       maxCombinedImageSamplerDescriptorCount,
                       fragmentShadingRateClampCombinerInputs,
                       defaultRobustnessStorageBuffers,
                       defaultRobustnessUniformBuffers,
                       defaultRobustnessVertexInputs,
                       defaultRobustnessImages,
                       copySrcLayoutCount,
                       pCopySrcLayouts,
                       copyDstLayoutCount,
                       pCopyDstLayouts,
                       optimalTilingLayoutUUID,
                       identicalMemoryTypeRequirements );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVulkan14Properties const & ) const = default;
#else
    // Memberwise equality. Note: pCopySrcLayouts / pCopyDstLayouts (and pNext) are compared by
    // address only, not by the contents of the arrays they point to.
    bool operator==( PhysicalDeviceVulkan14Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits ) &&
             ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor ) && ( supportsNonZeroFirstInstance == rhs.supportsNonZeroFirstInstance ) &&
             ( maxPushDescriptors == rhs.maxPushDescriptors ) &&
             ( dynamicRenderingLocalReadDepthStencilAttachments == rhs.dynamicRenderingLocalReadDepthStencilAttachments ) &&
             ( dynamicRenderingLocalReadMultisampledAttachments == rhs.dynamicRenderingLocalReadMultisampledAttachments ) &&
             ( earlyFragmentMultisampleCoverageAfterSampleCounting == rhs.earlyFragmentMultisampleCoverageAfterSampleCounting ) &&
             ( earlyFragmentSampleMaskTestBeforeSampleCounting == rhs.earlyFragmentSampleMaskTestBeforeSampleCounting ) &&
             ( depthStencilSwizzleOneSupport == rhs.depthStencilSwizzleOneSupport ) && ( polygonModePointSize == rhs.polygonModePointSize ) &&
             ( nonStrictSinglePixelWideLinesUseParallelogram == rhs.nonStrictSinglePixelWideLinesUseParallelogram ) &&
             ( nonStrictWideLinesUseParallelogram == rhs.nonStrictWideLinesUseParallelogram ) &&
             ( blockTexelViewCompatibleMultipleLayers == rhs.blockTexelViewCompatibleMultipleLayers ) &&
             ( maxCombinedImageSamplerDescriptorCount == rhs.maxCombinedImageSamplerDescriptorCount ) &&
             ( fragmentShadingRateClampCombinerInputs == rhs.fragmentShadingRateClampCombinerInputs ) &&
             ( defaultRobustnessStorageBuffers == rhs.defaultRobustnessStorageBuffers ) &&
             ( defaultRobustnessUniformBuffers == rhs.defaultRobustnessUniformBuffers ) &&
             ( defaultRobustnessVertexInputs == rhs.defaultRobustnessVertexInputs ) && ( defaultRobustnessImages == rhs.defaultRobustnessImages ) &&
             ( copySrcLayoutCount == rhs.copySrcLayoutCount ) && ( pCopySrcLayouts == rhs.pCopySrcLayouts ) &&
             ( copyDstLayoutCount == rhs.copyDstLayoutCount ) && ( pCopyDstLayouts == rhs.pCopyDstLayouts ) &&
             ( optimalTilingLayoutUUID == rhs.optimalTilingLayoutUUID ) && ( identicalMemoryTypeRequirements == rhs.identicalMemoryTypeRequirements );
#  endif
    }

    bool operator!=( PhysicalDeviceVulkan14Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceVulkan14Properties one-to-one; see the registry link above for their semantics.
    StructureType                         sType                                               = StructureType::ePhysicalDeviceVulkan14Properties;
    void *                                pNext                                               = {};
    uint32_t                              lineSubPixelPrecisionBits                           = {};
    uint32_t                              maxVertexAttribDivisor                              = {};
    Bool32                                supportsNonZeroFirstInstance                        = {};
    uint32_t                              maxPushDescriptors                                  = {};
    Bool32                                dynamicRenderingLocalReadDepthStencilAttachments    = {};
    Bool32                                dynamicRenderingLocalReadMultisampledAttachments    = {};
    Bool32                                earlyFragmentMultisampleCoverageAfterSampleCounting = {};
    Bool32                                earlyFragmentSampleMaskTestBeforeSampleCounting     = {};
    Bool32                                depthStencilSwizzleOneSupport                       = {};
    Bool32                                polygonModePointSize                                = {};
    Bool32                                nonStrictSinglePixelWideLinesUseParallelogram       = {};
    Bool32                                nonStrictWideLinesUseParallelogram                  = {};
    Bool32                                blockTexelViewCompatibleMultipleLayers              = {};
    uint32_t                              maxCombinedImageSamplerDescriptorCount              = {};
    Bool32                                fragmentShadingRateClampCombinerInputs              = {};
    PipelineRobustnessBufferBehavior      defaultRobustnessStorageBuffers                     = PipelineRobustnessBufferBehavior::eDeviceDefault;
    PipelineRobustnessBufferBehavior      defaultRobustnessUniformBuffers                     = PipelineRobustnessBufferBehavior::eDeviceDefault;
    PipelineRobustnessBufferBehavior      defaultRobustnessVertexInputs                       = PipelineRobustnessBufferBehavior::eDeviceDefault;
    PipelineRobustnessImageBehavior       defaultRobustnessImages                             = PipelineRobustnessImageBehavior::eDeviceDefault;
    uint32_t                              copySrcLayoutCount                                  = {};
    ImageLayout *                         pCopySrcLayouts                                     = {};
    uint32_t                              copyDstLayoutCount                                  = {};
    ImageLayout *                         pCopyDstLayouts                                     = {};
    ArrayWrapper1D<uint8_t, VK_UUID_SIZE> optimalTilingLayoutUUID                             = {};
    Bool32                                identicalMemoryTypeRequirements                     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkPhysicalDeviceVulkan14Properties to its C++ wrapper type
  // (trait used by generic code; only available when compiling as C++20 or newer).
  template <>
  struct CppType<VkPhysicalDeviceVulkan14Properties>
  {
    using Type = PhysicalDeviceVulkan14Properties;
  };
#endif

  // Maps the StructureType enumerant back to the C++ wrapper type that carries it as sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan14Properties>
  {
    using Type = PhysicalDeviceVulkan14Properties;
  };

  // wrapper struct for struct VkPhysicalDeviceVulkanMemoryModelFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkanMemoryModelFeatures.html
  //
  // Layout-compatible mirror of the C struct: the reinterpret_cast-based conversions below rely on the
  // member order and types matching VkPhysicalDeviceVulkanMemoryModelFeatures exactly.
  struct PhysicalDeviceVulkanMemoryModelFeatures
  {
    using NativeType = VkPhysicalDeviceVulkanMemoryModelFeatures;

    // Whether this struct may appear more than once in a pNext chain, and the sType value it must carry.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Memberwise constructor; every member is defaulted so it doubles as the default constructor.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( Bool32 vulkanMemoryModel_                             = {},
                                                                  Bool32 vulkanMemoryModelDeviceScope_                  = {},
                                                                  Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {},
                                                                  void * pNext_                                         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , vulkanMemoryModel{ vulkanMemoryModel_ }
      , vulkanMemoryModelDeviceScope{ vulkanMemoryModelDeviceScope_ }
      , vulkanMemoryModelAvailabilityVisibilityChains{ vulkanMemoryModelAvailabilityVisibilityChains_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpreting it as the C++ wrapper (no per-member conversion).
    PhysicalDeviceVulkanMemoryModelFeatures( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVulkanMemoryModelFeatures( *reinterpret_cast<PhysicalDeviceVulkanMemoryModelFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceVulkanMemoryModelFeatures & operator=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct, again via reinterpretation (relies on identical layout).
    PhysicalDeviceVulkanMemoryModelFeatures & operator=( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceVulkanMemoryModelFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModel( Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
    {
      vulkanMemoryModel = vulkanMemoryModel_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures &
      setVulkanMemoryModelDeviceScope( Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
    {
      vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures &
      setVulkanMemoryModelAvailabilityVisibilityChains( Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
    {
      vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type: zero-copy reinterpretations of this very object,
    // so the wrapper can be passed directly to the C API.
    operator VkPhysicalDeviceVulkanMemoryModelFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVulkanMemoryModelFeatures *>( this );
    }

    operator VkPhysicalDeviceVulkanMemoryModelFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeatures *>( this );
    }

    operator VkPhysicalDeviceVulkanMemoryModelFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceVulkanMemoryModelFeatures *>( this );
    }

    operator VkPhysicalDeviceVulkanMemoryModelFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members in declaration order
    // (used e.g. by the reflection-based operator== below).
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, vulkanMemoryModel, vulkanMemoryModelDeviceScope, vulkanMemoryModelAvailabilityVisibilityChains );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVulkanMemoryModelFeatures const & ) const = default;
#else
    // Memberwise equality; pNext is compared by address only, not by chain contents.
    bool operator==( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vulkanMemoryModel == rhs.vulkanMemoryModel ) &&
             ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) &&
             ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains );
#  endif
    }

    bool operator!=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceVulkanMemoryModelFeatures one-to-one; see the registry link above for their semantics.
    StructureType sType                                         = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;
    void *        pNext                                         = {};
    Bool32        vulkanMemoryModel                             = {};
    Bool32        vulkanMemoryModelDeviceScope                  = {};
    Bool32        vulkanMemoryModelAvailabilityVisibilityChains = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkPhysicalDeviceVulkanMemoryModelFeatures to its C++ wrapper type
  // (trait used by generic code; only available when compiling as C++20 or newer).
  template <>
  struct CppType<VkPhysicalDeviceVulkanMemoryModelFeatures>
  {
    using Type = PhysicalDeviceVulkanMemoryModelFeatures;
  };
#endif

  // Maps the StructureType enumerant back to the C++ wrapper type that carries it as sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkanMemoryModelFeatures>
  {
    using Type = PhysicalDeviceVulkanMemoryModelFeatures;
  };

  // Alias kept for source compatibility with the original KHR extension name of this struct.
  using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures;

  // wrapper struct for struct VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.html
  //
  // Layout-compatible mirror of the C struct: the reinterpret_cast-based conversions below rely on the
  // member order and types matching VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR exactly.
  struct PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;

    // Whether this struct may appear more than once in a pNext chain, and the sType value it must carry.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Memberwise constructor; every member is defaulted so it doubles as the default constructor.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( Bool32 workgroupMemoryExplicitLayout_                  = {},
                                                                                 Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ = {},
                                                                                 Bool32 workgroupMemoryExplicitLayout8BitAccess_        = {},
                                                                                 Bool32 workgroupMemoryExplicitLayout16BitAccess_       = {},
                                                                                 void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , workgroupMemoryExplicitLayout{ workgroupMemoryExplicitLayout_ }
      , workgroupMemoryExplicitLayoutScalarBlockLayout{ workgroupMemoryExplicitLayoutScalarBlockLayout_ }
      , workgroupMemoryExplicitLayout8BitAccess{ workgroupMemoryExplicitLayout8BitAccess_ }
      , workgroupMemoryExplicitLayout16BitAccess{ workgroupMemoryExplicitLayout16BitAccess_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpreting it as the C++ wrapper (no per-member conversion).
    PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( *reinterpret_cast<PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const *>( &rhs ) )
    {
    }

    PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &
      operator=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct, again via reinterpretation (relies on identical layout).
    PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &
      operator=( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &
      setWorkgroupMemoryExplicitLayout( Bool32 workgroupMemoryExplicitLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      workgroupMemoryExplicitLayout = workgroupMemoryExplicitLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &
      setWorkgroupMemoryExplicitLayoutScalarBlockLayout( Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      workgroupMemoryExplicitLayoutScalarBlockLayout = workgroupMemoryExplicitLayoutScalarBlockLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &
      setWorkgroupMemoryExplicitLayout8BitAccess( Bool32 workgroupMemoryExplicitLayout8BitAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      workgroupMemoryExplicitLayout8BitAccess = workgroupMemoryExplicitLayout8BitAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &
      setWorkgroupMemoryExplicitLayout16BitAccess( Bool32 workgroupMemoryExplicitLayout16BitAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      workgroupMemoryExplicitLayout16BitAccess = workgroupMemoryExplicitLayout16BitAccess_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type: zero-copy reinterpretations of this very object,
    // so the wrapper can be passed directly to the C API.
    operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members in declaration order
    // (used e.g. by the reflection-based operator== below).
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       workgroupMemoryExplicitLayout,
                       workgroupMemoryExplicitLayoutScalarBlockLayout,
                       workgroupMemoryExplicitLayout8BitAccess,
                       workgroupMemoryExplicitLayout16BitAccess );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & ) const = default;
#else
    // Memberwise equality; pNext is compared by address only, not by chain contents.
    bool operator==( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( workgroupMemoryExplicitLayout == rhs.workgroupMemoryExplicitLayout ) &&
             ( workgroupMemoryExplicitLayoutScalarBlockLayout == rhs.workgroupMemoryExplicitLayoutScalarBlockLayout ) &&
             ( workgroupMemoryExplicitLayout8BitAccess == rhs.workgroupMemoryExplicitLayout8BitAccess ) &&
             ( workgroupMemoryExplicitLayout16BitAccess == rhs.workgroupMemoryExplicitLayout16BitAccess );
#  endif
    }

    bool operator!=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR one-to-one; see the registry link above for their semantics.
    StructureType sType                                          = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
    void *        pNext                                          = {};
    Bool32        workgroupMemoryExplicitLayout                  = {};
    Bool32        workgroupMemoryExplicitLayoutScalarBlockLayout = {};
    Bool32        workgroupMemoryExplicitLayout8BitAccess        = {};
    Bool32        workgroupMemoryExplicitLayout16BitAccess       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR to its C++ wrapper type
  // (trait used by generic code; only available when compiling as C++20 or newer).
  template <>
  struct CppType<VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>
  {
    using Type = PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
  };
#endif

  // Maps the StructureType enumerant back to the C++ wrapper type that carries it as sType.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>
  {
    using Type = PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
  };

  // wrapper struct for struct VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT.html
  struct PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;

    // Structure-chain traits: the sType this struct carries, and that duplicates are not allowed in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext comes last so the feature flag can be passed positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( Bool32 ycbcr2plane444Formats_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , ycbcr2plane444Formats{ ycbcr2plane444Formats_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on the wrapper being layout-identical to the C type.
    PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( *reinterpret_cast<PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & operator=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility reliance as the converting constructor).
    PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & operator=( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this for builder-style use.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & setYcbcr2plane444Formats( Bool32 ycbcr2plane444Formats_ ) VULKAN_HPP_NOEXCEPT
    {
      ycbcr2plane444Formats = ycbcr2plane444Formats_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions so the wrapper can be handed directly to the C API (by reference or pointer).
    operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used by operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, ycbcr2plane444Formats );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & ) const = default;
#else
    // Member-wise equality (via reflect() tuples when reflection is enabled).
    bool operator==( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcr2plane444Formats == rhs.ycbcr2plane444Formats );
#  endif
    }

    bool operator!=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct in declaration order; sType is pre-set to the matching enumerant.
    StructureType sType                 = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
    void *        pNext                 = {};
    Bool32        ycbcr2plane444Formats = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to this C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>
  {
    using Type = PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>
  {
    using Type = PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceYcbcrDegammaFeaturesQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceYcbcrDegammaFeaturesQCOM.html
  struct PhysicalDeviceYcbcrDegammaFeaturesQCOM
  {
    using NativeType = VkPhysicalDeviceYcbcrDegammaFeaturesQCOM;

    // Structure-chain traits: the sType this struct carries, and that duplicates are not allowed in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceYcbcrDegammaFeaturesQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext comes last so the feature flag can be passed positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrDegammaFeaturesQCOM( Bool32 ycbcrDegamma_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , ycbcrDegamma{ ycbcrDegamma_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrDegammaFeaturesQCOM( PhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on the wrapper being layout-identical to the C type.
    PhysicalDeviceYcbcrDegammaFeaturesQCOM( VkPhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceYcbcrDegammaFeaturesQCOM( *reinterpret_cast<PhysicalDeviceYcbcrDegammaFeaturesQCOM const *>( &rhs ) )
    {
    }

    PhysicalDeviceYcbcrDegammaFeaturesQCOM & operator=( PhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility reliance as the converting constructor).
    PhysicalDeviceYcbcrDegammaFeaturesQCOM & operator=( VkPhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceYcbcrDegammaFeaturesQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this for builder-style use.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrDegammaFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrDegammaFeaturesQCOM & setYcbcrDegamma( Bool32 ycbcrDegamma_ ) VULKAN_HPP_NOEXCEPT
    {
      ycbcrDegamma = ycbcrDegamma_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions so the wrapper can be handed directly to the C API (by reference or pointer).
    operator VkPhysicalDeviceYcbcrDegammaFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceYcbcrDegammaFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceYcbcrDegammaFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceYcbcrDegammaFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceYcbcrDegammaFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceYcbcrDegammaFeaturesQCOM *>( this );
    }

    operator VkPhysicalDeviceYcbcrDegammaFeaturesQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceYcbcrDegammaFeaturesQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used by operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, ycbcrDegamma );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceYcbcrDegammaFeaturesQCOM const & ) const = default;
#else
    // Member-wise equality (via reflect() tuples when reflection is enabled).
    bool operator==( PhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcrDegamma == rhs.ycbcrDegamma );
#  endif
    }

    bool operator!=( PhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct in declaration order; sType is pre-set to the matching enumerant.
    StructureType sType        = StructureType::ePhysicalDeviceYcbcrDegammaFeaturesQCOM;
    void *        pNext        = {};
    Bool32        ycbcrDegamma = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to this C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceYcbcrDegammaFeaturesQCOM>
  {
    using Type = PhysicalDeviceYcbcrDegammaFeaturesQCOM;
  };
#endif

  // Maps the StructureType enumerant back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceYcbcrDegammaFeaturesQCOM>
  {
    using Type = PhysicalDeviceYcbcrDegammaFeaturesQCOM;
  };

  // wrapper struct for struct VkPhysicalDeviceYcbcrImageArraysFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceYcbcrImageArraysFeaturesEXT.html
  struct PhysicalDeviceYcbcrImageArraysFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceYcbcrImageArraysFeaturesEXT;

    // Structure-chain traits: the sType this struct carries, and that duplicates are not allowed in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext comes last so the feature flag can be passed positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( Bool32 ycbcrImageArrays_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , ycbcrImageArrays{ ycbcrImageArrays_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on the wrapper being layout-identical to the C type.
    PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceYcbcrImageArraysFeaturesEXT( *reinterpret_cast<PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility reliance as the converting constructor).
    PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this for builder-style use.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT & setYcbcrImageArrays( Bool32 ycbcrImageArrays_ ) VULKAN_HPP_NOEXCEPT
    {
      ycbcrImageArrays = ycbcrImageArrays_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions so the wrapper can be handed directly to the C API (by reference or pointer).
    operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used by operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, ycbcrImageArrays );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & ) const = default;
#else
    // Member-wise equality (via reflect() tuples when reflection is enabled).
    bool operator==( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcrImageArrays == rhs.ycbcrImageArrays );
#  endif
    }

    bool operator!=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct in declaration order; sType is pre-set to the matching enumerant.
    StructureType sType            = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;
    void *        pNext            = {};
    Bool32        ycbcrImageArrays = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to this C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT>
  {
    using Type = PhysicalDeviceYcbcrImageArraysFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT>
  {
    using Type = PhysicalDeviceYcbcrImageArraysFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT.html
  struct PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT;

    // Structure-chain traits: the sType this struct carries, and that duplicates are not allowed in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext comes last so the feature flag can be passed positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT( Bool32 zeroInitializeDeviceMemory_ = {},
                                                                              void * pNext_                      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , zeroInitializeDeviceMemory{ zeroInitializeDeviceMemory_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT( PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on the wrapper being layout-identical to the C type.
    PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT( VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT( *reinterpret_cast<PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT const *>( &rhs ) )
    {
    }

    PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT &
      operator=( PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility reliance as the converting constructor).
    PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT & operator=( VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this for builder-style use.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT &
      setZeroInitializeDeviceMemory( Bool32 zeroInitializeDeviceMemory_ ) VULKAN_HPP_NOEXCEPT
    {
      zeroInitializeDeviceMemory = zeroInitializeDeviceMemory_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions so the wrapper can be handed directly to the C API (by reference or pointer).
    operator VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used by operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, zeroInitializeDeviceMemory );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT const & ) const = default;
#else
    // Member-wise equality (via reflect() tuples when reflection is enabled).
    bool operator==( PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( zeroInitializeDeviceMemory == rhs.zeroInitializeDeviceMemory );
#  endif
    }

    bool operator!=( PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct in declaration order; sType is pre-set to the matching enumerant.
    StructureType sType                      = StructureType::ePhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT;
    void *        pNext                      = {};
    Bool32        zeroInitializeDeviceMemory = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to this C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT>
  {
    using Type = PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT;
  };
#endif

  // Maps the StructureType enumerant back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT>
  {
    using Type = PhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT;
  };

  // wrapper struct for struct VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures.html
  struct PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures
  {
    using NativeType = VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;

    // Structure-chain traits: the sType this struct carries, and that duplicates are not allowed in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext comes last so the feature flag can be passed positionally.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( Bool32 shaderZeroInitializeWorkgroupMemory_ = {},
                                                                              void * pNext_                               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shaderZeroInitializeWorkgroupMemory{ shaderZeroInitializeWorkgroupMemory_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on the wrapper being layout-identical to the C type.
    PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( *reinterpret_cast<PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const *>( &rhs ) )
    {
    }

    PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &
      operator=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility reliance as the converting constructor).
    PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & operator=( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this for builder-style use.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &
      setShaderZeroInitializeWorkgroupMemory( Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions so the wrapper can be handed directly to the C API (by reference or pointer).
    operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures *>( this );
    }

    operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures *>( this );
    }

    operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures *>( this );
    }

    operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used by operator== below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderZeroInitializeWorkgroupMemory );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & ) const = default;
#else
    // Member-wise equality (via reflect() tuples when reflection is enabled).
    bool operator==( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory );
#  endif
    }

    bool operator!=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct in declaration order; sType is pre-set to the matching enumerant.
    StructureType sType                               = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
    void *        pNext                               = {};
    Bool32        shaderZeroInitializeWorkgroupMemory = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to this C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>
  {
    using Type = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
  };
#endif

  // Maps the StructureType enumerant back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>
  {
    using Type = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
  };

  // Backwards-compatible alias for the former KHR extension name of this (now core) struct.
  using PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;

  // wrapper struct for struct VkPipelineBinaryKeyKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineBinaryKeyKHR.html
  struct PipelineBinaryKeyKHR
  {
    using NativeType = VkPipelineBinaryKeyKHR;

    // Structure-chain traits: the sType this struct carries, and that duplicates are not allowed in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineBinaryKeyKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor. keySize gives the number of valid bytes in the
    // fixed-size key array; pNext comes last so the data members can be passed positionally.
    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeyKHR( uint32_t                                                         keySize_ = {},
                                                  std::array<uint8_t, VK_MAX_PIPELINE_BINARY_KEY_SIZE_KHR> const & key_     = {},
                                                  void *                                                           pNext_   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , keySize{ keySize_ }
      , key{ key_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeyKHR( PipelineBinaryKeyKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on the wrapper being layout-identical to the C type.
    PipelineBinaryKeyKHR( VkPipelineBinaryKeyKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineBinaryKeyKHR( *reinterpret_cast<PipelineBinaryKeyKHR const *>( &rhs ) )
    {
    }

    PipelineBinaryKeyKHR & operator=( PipelineBinaryKeyKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility reliance as the converting constructor).
    PipelineBinaryKeyKHR & operator=( VkPipelineBinaryKeyKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineBinaryKeyKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this for builder-style use.
    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeyKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeyKHR & setKeySize( uint32_t keySize_ ) VULKAN_HPP_NOEXCEPT
    {
      keySize = keySize_;
      return *this;
    }

    // Replaces the whole fixed-size key array; does not update keySize.
    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeyKHR & setKey( std::array<uint8_t, VK_MAX_PIPELINE_BINARY_KEY_SIZE_KHR> key_ ) VULKAN_HPP_NOEXCEPT
    {
      key = key_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions so the wrapper can be handed directly to the C API (by reference or pointer).
    operator VkPipelineBinaryKeyKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineBinaryKeyKHR *>( this );
    }

    operator VkPipelineBinaryKeyKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineBinaryKeyKHR *>( this );
    }

    operator VkPipelineBinaryKeyKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineBinaryKeyKHR *>( this );
    }

    operator VkPipelineBinaryKeyKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineBinaryKeyKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used by operator== below.
    std::tuple<StructureType const &, void * const &, uint32_t const &, ArrayWrapper1D<uint8_t, VK_MAX_PIPELINE_BINARY_KEY_SIZE_KHR> const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, keySize, key );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineBinaryKeyKHR const & ) const = default;
#else
    // Member-wise equality; note the key array is compared over its full capacity, not just keySize bytes.
    bool operator==( PipelineBinaryKeyKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( keySize == rhs.keySize ) && ( key == rhs.key );
#  endif
    }

    bool operator!=( PipelineBinaryKeyKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct in declaration order; sType is pre-set to the matching enumerant.
    StructureType                                                sType   = StructureType::ePipelineBinaryKeyKHR;
    void *                                                       pNext   = {};
    uint32_t                                                     keySize = {};
    ArrayWrapper1D<uint8_t, VK_MAX_PIPELINE_BINARY_KEY_SIZE_KHR> key     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to this C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkPipelineBinaryKeyKHR>
  {
    using Type = PipelineBinaryKeyKHR;
  };
#endif

  // Maps the StructureType enumerant back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePipelineBinaryKeyKHR>
  {
    using Type = PipelineBinaryKeyKHR;
  };

  // wrapper struct for struct VkPipelineBinaryDataKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineBinaryDataKHR.html
  // Non-owning (size, pointer) pair; it has no sType/pNext and cannot be part of a structure chain.
  struct PipelineBinaryDataKHR
  {
    using NativeType = VkPipelineBinaryDataKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; stores the pointer as-is, no copy of the pointed-to bytes is made.
    VULKAN_HPP_CONSTEXPR PipelineBinaryDataKHR( size_t dataSize_ = {}, void * pData_ = {} ) VULKAN_HPP_NOEXCEPT
      : dataSize{ dataSize_ }
      , pData{ pData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineBinaryDataKHR( PipelineBinaryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on the wrapper being layout-identical to the C type.
    PipelineBinaryDataKHR( VkPipelineBinaryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineBinaryDataKHR( *reinterpret_cast<PipelineBinaryDataKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode convenience: record the byte size and data pointer of any contiguous range (non-owning).
    template <typename T>
    PipelineBinaryDataKHR( ArrayProxyNoTemporaries<T> const & data_ ) : dataSize( data_.size() * sizeof( T ) ), pData( data_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineBinaryDataKHR & operator=( PipelineBinaryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility reliance as the converting constructor).
    PipelineBinaryDataKHR & operator=( VkPipelineBinaryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineBinaryDataKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this for builder-style use.
    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryDataKHR & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
    {
      dataSize = dataSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryDataKHR & setPData( void * pData_ ) VULKAN_HPP_NOEXCEPT
    {
      pData = pData_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets both dataSize (in bytes) and pData from one contiguous range.
    template <typename T>
    PipelineBinaryDataKHR & setData( ArrayProxyNoTemporaries<T> const & data_ ) VULKAN_HPP_NOEXCEPT
    {
      dataSize = data_.size() * sizeof( T );
      pData    = data_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions so the wrapper can be handed directly to the C API (by reference or pointer).
    operator VkPipelineBinaryDataKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineBinaryDataKHR *>( this );
    }

    operator VkPipelineBinaryDataKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineBinaryDataKHR *>( this );
    }

    operator VkPipelineBinaryDataKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineBinaryDataKHR *>( this );
    }

    operator VkPipelineBinaryDataKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineBinaryDataKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used by operator== below.
    std::tuple<size_t const &, void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( dataSize, pData );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineBinaryDataKHR const & ) const = default;
#else
    // Shallow equality: compares the pointer value, not the pointed-to bytes.
    bool operator==( PipelineBinaryDataKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( dataSize == rhs.dataSize ) && ( pData == rhs.pData );
#  endif
    }

    bool operator!=( PipelineBinaryDataKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct in declaration order.
    size_t dataSize = {};
    void * pData    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to this C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkPipelineBinaryDataKHR>
  {
    using Type = PipelineBinaryDataKHR;
  };
#endif

  // wrapper struct for struct VkPipelineBinaryKeysAndDataKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineBinaryKeysAndDataKHR.html
  struct PipelineBinaryKeysAndDataKHR
  {
    using NativeType = VkPipelineBinaryKeysAndDataKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; the defaults produce an empty key/data set.
    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeysAndDataKHR( uint32_t                      binaryCount_         = {},
                                                          const PipelineBinaryKeyKHR *  pPipelineBinaryKeys_ = {},
                                                          const PipelineBinaryDataKHR * pPipelineBinaryData_ = {} ) VULKAN_HPP_NOEXCEPT
      : binaryCount{ binaryCount_ }
      , pPipelineBinaryKeys{ pPipelineBinaryKeys_ }
      , pPipelineBinaryData{ pPipelineBinaryData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeysAndDataKHR( PipelineBinaryKeysAndDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; layout-compatible, so a member-wise
    // copy via the wrapper's defaulted copy constructor suffices.
    PipelineBinaryKeysAndDataKHR( VkPipelineBinaryKeysAndDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineBinaryKeysAndDataKHR( *reinterpret_cast<PipelineBinaryKeysAndDataKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // ArrayProxy constructor: binaryCount is derived from the keys proxy, and the
    // two proxies are required to have the same number of elements (one data blob
    // per key).
    // NOTE(review): pipelineBinaryData_ defaults to {}, yet the size check below
    // rejects an empty data proxy whenever pipelineBinaryKeys_ is non-empty, so
    // the default is only usable with an empty keys proxy — confirm intended.
    PipelineBinaryKeysAndDataKHR( ArrayProxyNoTemporaries<const PipelineBinaryKeyKHR> const &  pipelineBinaryKeys_,
                                  ArrayProxyNoTemporaries<const PipelineBinaryDataKHR> const & pipelineBinaryData_ = {} )
      : binaryCount( static_cast<uint32_t>( pipelineBinaryKeys_.size() ) )
      , pPipelineBinaryKeys( pipelineBinaryKeys_.data() )
      , pPipelineBinaryData( pipelineBinaryData_.data() )
    {
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( pipelineBinaryKeys_.size() == pipelineBinaryData_.size() );
#    else
      if ( pipelineBinaryKeys_.size() != pipelineBinaryData_.size() )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                          "::PipelineBinaryKeysAndDataKHR::PipelineBinaryKeysAndDataKHR: pipelineBinaryKeys_.size() != pipelineBinaryData_.size()" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineBinaryKeysAndDataKHR & operator=( PipelineBinaryKeysAndDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (member-wise copy through the wrapper).
    PipelineBinaryKeysAndDataKHR & operator=( VkPipelineBinaryKeysAndDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineBinaryKeysAndDataKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this for builder-style use.
    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeysAndDataKHR & setBinaryCount( uint32_t binaryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      binaryCount = binaryCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeysAndDataKHR & setPPipelineBinaryKeys( const PipelineBinaryKeyKHR * pPipelineBinaryKeys_ ) VULKAN_HPP_NOEXCEPT
    {
      pPipelineBinaryKeys = pPipelineBinaryKeys_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Proxy setter: updates binaryCount and the keys pointer together.
    PipelineBinaryKeysAndDataKHR & setPipelineBinaryKeys( ArrayProxyNoTemporaries<const PipelineBinaryKeyKHR> const & pipelineBinaryKeys_ ) VULKAN_HPP_NOEXCEPT
    {
      binaryCount         = static_cast<uint32_t>( pipelineBinaryKeys_.size() );
      pPipelineBinaryKeys = pipelineBinaryKeys_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeysAndDataKHR & setPPipelineBinaryData( const PipelineBinaryDataKHR * pPipelineBinaryData_ ) VULKAN_HPP_NOEXCEPT
    {
      pPipelineBinaryData = pPipelineBinaryData_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Proxy setter: updates binaryCount and the data pointer together; callers
    // must keep the keys and data arrays the same length.
    PipelineBinaryKeysAndDataKHR & setPipelineBinaryData( ArrayProxyNoTemporaries<const PipelineBinaryDataKHR> const & pipelineBinaryData_ ) VULKAN_HPP_NOEXCEPT
    {
      binaryCount         = static_cast<uint32_t>( pipelineBinaryData_.size() );
      pPipelineBinaryData = pipelineBinaryData_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (reference and pointer forms).
    operator VkPipelineBinaryKeysAndDataKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineBinaryKeysAndDataKHR *>( this );
    }

    operator VkPipelineBinaryKeysAndDataKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineBinaryKeysAndDataKHR *>( this );
    }

    operator VkPipelineBinaryKeysAndDataKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineBinaryKeysAndDataKHR *>( this );
    }

    operator VkPipelineBinaryKeysAndDataKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineBinaryKeysAndDataKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of references, for reflection-based comparison.
    std::tuple<uint32_t const &, const PipelineBinaryKeyKHR * const &, const PipelineBinaryDataKHR * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( binaryCount, pPipelineBinaryKeys, pPipelineBinaryData );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineBinaryKeysAndDataKHR const & ) const = default;
#else
    // Member-wise equality; the array pointers are compared by address, not by
    // the elements they point to.
    bool operator==( PipelineBinaryKeysAndDataKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( binaryCount == rhs.binaryCount ) && ( pPipelineBinaryKeys == rhs.pPipelineBinaryKeys ) && ( pPipelineBinaryData == rhs.pPipelineBinaryData );
#  endif
    }

    bool operator!=( PipelineBinaryKeysAndDataKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Mirrors VkPipelineBinaryKeysAndDataKHR: parallel arrays of binaryCount
    // keys and data blobs (non-owning pointers).
    uint32_t                      binaryCount         = {};
    const PipelineBinaryKeyKHR *  pPipelineBinaryKeys = {};
    const PipelineBinaryDataKHR * pPipelineBinaryData = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkPipelineBinaryKeysAndDataKHR>
  {
    using Type = PipelineBinaryKeysAndDataKHR;
  };
#endif

  // wrapper struct for struct VkPipelineCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCreateInfoKHR.html
  // Carries only sType and pNext: the actual pipeline description is supplied
  // entirely through the pNext extension chain.
  struct PipelineCreateInfoKHR
  {
    using NativeType = VkPipelineCreateInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineCreateInfoKHR( void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } {}

    VULKAN_HPP_CONSTEXPR PipelineCreateInfoKHR( PipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (layout-compatible member-wise copy).
    PipelineCreateInfoKHR( VkPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCreateInfoKHR( *reinterpret_cast<PipelineCreateInfoKHR const *>( &rhs ) )
    {
    }

    PipelineCreateInfoKHR & operator=( PipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PipelineCreateInfoKHR & operator=( VkPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setter for the extension chain pointer.
    VULKAN_HPP_CONSTEXPR_14 PipelineCreateInfoKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (reference and pointer forms).
    operator VkPipelineCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCreateInfoKHR *>( this );
    }

    operator VkPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCreateInfoKHR *>( this );
    }

    operator VkPipelineCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineCreateInfoKHR *>( this );
    }

    operator VkPipelineCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of references, for reflection-based comparison.
    std::tuple<StructureType const &, void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineCreateInfoKHR const & ) const = default;
#else
    // Member-wise equality; pNext is compared by address, not chain contents.
    bool operator==( PipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
#  endif
    }

    bool operator!=( PipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType = StructureType::ePipelineCreateInfoKHR;
    void *        pNext = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkPipelineCreateInfoKHR>
  {
    using Type = PipelineCreateInfoKHR;
  };
#endif

  // Maps the sType enumerant back to the wrapper type (used for pNext-chain
  // validation helpers).
  template <>
  struct CppType<StructureType, StructureType::ePipelineCreateInfoKHR>
  {
    using Type = PipelineCreateInfoKHR;
  };

  // wrapper struct for struct VkPipelineBinaryCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineBinaryCreateInfoKHR.html
  // Describes the source a pipeline binary is created from: exactly one of
  // pKeysAndDataInfo, pipeline, or pPipelineCreateInfo is expected to be used
  // (per the VK_KHR_pipeline_binary spec) — the struct itself does not enforce
  // this.
  struct PipelineBinaryCreateInfoKHR
  {
    using NativeType = VkPipelineBinaryCreateInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineBinaryCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR( const PipelineBinaryKeysAndDataKHR * pKeysAndDataInfo_    = {},
                                                         Pipeline                             pipeline_            = {},
                                                         const PipelineCreateInfoKHR *        pPipelineCreateInfo_ = {},
                                                         const void *                         pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pKeysAndDataInfo{ pKeysAndDataInfo_ }
      , pipeline{ pipeline_ }
      , pPipelineCreateInfo{ pPipelineCreateInfo_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR( PipelineBinaryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (layout-compatible member-wise copy).
    PipelineBinaryCreateInfoKHR( VkPipelineBinaryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineBinaryCreateInfoKHR( *reinterpret_cast<PipelineBinaryCreateInfoKHR const *>( &rhs ) )
    {
    }

    PipelineBinaryCreateInfoKHR & operator=( PipelineBinaryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PipelineBinaryCreateInfoKHR & operator=( VkPipelineBinaryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineBinaryCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this for builder-style use.
    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR & setPKeysAndDataInfo( const PipelineBinaryKeysAndDataKHR * pKeysAndDataInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pKeysAndDataInfo = pKeysAndDataInfo_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR & setPipeline( Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
    {
      pipeline = pipeline_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR & setPPipelineCreateInfo( const PipelineCreateInfoKHR * pPipelineCreateInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pPipelineCreateInfo = pPipelineCreateInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (reference and pointer forms).
    operator VkPipelineBinaryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( this );
    }

    operator VkPipelineBinaryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineBinaryCreateInfoKHR *>( this );
    }

    operator VkPipelineBinaryCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( this );
    }

    operator VkPipelineBinaryCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineBinaryCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of references, for reflection-based comparison.
    std::
      tuple<StructureType const &, const void * const &, const PipelineBinaryKeysAndDataKHR * const &, Pipeline const &, const PipelineCreateInfoKHR * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pKeysAndDataInfo, pipeline, pPipelineCreateInfo );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineBinaryCreateInfoKHR const & ) const = default;
#else
    // Member-wise equality; pointer members are compared by address.
    bool operator==( PipelineBinaryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pKeysAndDataInfo == rhs.pKeysAndDataInfo ) && ( pipeline == rhs.pipeline ) &&
             ( pPipelineCreateInfo == rhs.pPipelineCreateInfo );
#  endif
    }

    bool operator!=( PipelineBinaryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                        sType               = StructureType::ePipelineBinaryCreateInfoKHR;
    const void *                         pNext               = {};
    const PipelineBinaryKeysAndDataKHR * pKeysAndDataInfo    = {};
    Pipeline                             pipeline            = {};
    const PipelineCreateInfoKHR *        pPipelineCreateInfo = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkPipelineBinaryCreateInfoKHR>
  {
    using Type = PipelineBinaryCreateInfoKHR;
  };
#endif

  // Maps the sType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePipelineBinaryCreateInfoKHR>
  {
    using Type = PipelineBinaryCreateInfoKHR;
  };

  // wrapper struct for struct VkPipelineBinaryDataInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineBinaryDataInfoKHR.html
  // Identifies the pipeline binary whose data is being queried.
  struct PipelineBinaryDataInfoKHR
  {
    using NativeType = VkPipelineBinaryDataInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineBinaryDataInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineBinaryDataInfoKHR( PipelineBinaryKHR pipelineBinary_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pipelineBinary{ pipelineBinary_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineBinaryDataInfoKHR( PipelineBinaryDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (layout-compatible member-wise copy).
    PipelineBinaryDataInfoKHR( VkPipelineBinaryDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineBinaryDataInfoKHR( *reinterpret_cast<PipelineBinaryDataInfoKHR const *>( &rhs ) )
    {
    }

    PipelineBinaryDataInfoKHR & operator=( PipelineBinaryDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PipelineBinaryDataInfoKHR & operator=( VkPipelineBinaryDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineBinaryDataInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this for builder-style use.
    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryDataInfoKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryDataInfoKHR & setPipelineBinary( PipelineBinaryKHR pipelineBinary_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineBinary = pipelineBinary_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (reference and pointer forms).
    operator VkPipelineBinaryDataInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineBinaryDataInfoKHR *>( this );
    }

    operator VkPipelineBinaryDataInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineBinaryDataInfoKHR *>( this );
    }

    operator VkPipelineBinaryDataInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineBinaryDataInfoKHR *>( this );
    }

    operator VkPipelineBinaryDataInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineBinaryDataInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of references, for reflection-based comparison.
    std::tuple<StructureType const &, void * const &, PipelineBinaryKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pipelineBinary );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineBinaryDataInfoKHR const & ) const = default;
#else
    // Member-wise equality; pNext is compared by address.
    bool operator==( PipelineBinaryDataInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBinary == rhs.pipelineBinary );
#  endif
    }

    bool operator!=( PipelineBinaryDataInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType     sType          = StructureType::ePipelineBinaryDataInfoKHR;
    void *            pNext          = {};
    PipelineBinaryKHR pipelineBinary = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkPipelineBinaryDataInfoKHR>
  {
    using Type = PipelineBinaryDataInfoKHR;
  };
#endif

  // Maps the sType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePipelineBinaryDataInfoKHR>
  {
    using Type = PipelineBinaryDataInfoKHR;
  };

  // wrapper struct for struct VkPipelineBinaryHandlesInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineBinaryHandlesInfoKHR.html
  // Output structure: pPipelineBinaries is a caller-provided, writable array
  // that the implementation fills with pipeline binary handles.
  struct PipelineBinaryHandlesInfoKHR
  {
    using NativeType = VkPipelineBinaryHandlesInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineBinaryHandlesInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineBinaryHandlesInfoKHR( uint32_t            pipelineBinaryCount_ = {},
                                                       PipelineBinaryKHR * pPipelineBinaries_   = {},
                                                       const void *        pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pipelineBinaryCount{ pipelineBinaryCount_ }
      , pPipelineBinaries{ pPipelineBinaries_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineBinaryHandlesInfoKHR( PipelineBinaryHandlesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (layout-compatible member-wise copy).
    PipelineBinaryHandlesInfoKHR( VkPipelineBinaryHandlesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineBinaryHandlesInfoKHR( *reinterpret_cast<PipelineBinaryHandlesInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // ArrayProxy constructor: derives pipelineBinaryCount from the proxy size.
    PipelineBinaryHandlesInfoKHR( ArrayProxyNoTemporaries<PipelineBinaryKHR> const & pipelineBinaries_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), pipelineBinaryCount( static_cast<uint32_t>( pipelineBinaries_.size() ) ), pPipelineBinaries( pipelineBinaries_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineBinaryHandlesInfoKHR & operator=( PipelineBinaryHandlesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PipelineBinaryHandlesInfoKHR & operator=( VkPipelineBinaryHandlesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineBinaryHandlesInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this for builder-style use.
    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryHandlesInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryHandlesInfoKHR & setPipelineBinaryCount( uint32_t pipelineBinaryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineBinaryCount = pipelineBinaryCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryHandlesInfoKHR & setPPipelineBinaries( PipelineBinaryKHR * pPipelineBinaries_ ) VULKAN_HPP_NOEXCEPT
    {
      pPipelineBinaries = pPipelineBinaries_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Proxy setter: updates count and pointer together.
    PipelineBinaryHandlesInfoKHR & setPipelineBinaries( ArrayProxyNoTemporaries<PipelineBinaryKHR> const & pipelineBinaries_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineBinaryCount = static_cast<uint32_t>( pipelineBinaries_.size() );
      pPipelineBinaries   = pipelineBinaries_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (reference and pointer forms).
    operator VkPipelineBinaryHandlesInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineBinaryHandlesInfoKHR *>( this );
    }

    operator VkPipelineBinaryHandlesInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( this );
    }

    operator VkPipelineBinaryHandlesInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineBinaryHandlesInfoKHR *>( this );
    }

    operator VkPipelineBinaryHandlesInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of references, for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, PipelineBinaryKHR * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pipelineBinaryCount, pPipelineBinaries );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineBinaryHandlesInfoKHR const & ) const = default;
#else
    // Member-wise equality; the binaries array is compared by pointer address.
    bool operator==( PipelineBinaryHandlesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBinaryCount == rhs.pipelineBinaryCount ) &&
             ( pPipelineBinaries == rhs.pPipelineBinaries );
#  endif
    }

    bool operator!=( PipelineBinaryHandlesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType       sType               = StructureType::ePipelineBinaryHandlesInfoKHR;
    const void *        pNext               = {};
    uint32_t            pipelineBinaryCount = {};
    PipelineBinaryKHR * pPipelineBinaries   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkPipelineBinaryHandlesInfoKHR>
  {
    using Type = PipelineBinaryHandlesInfoKHR;
  };
#endif

  // Maps the sType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePipelineBinaryHandlesInfoKHR>
  {
    using Type = PipelineBinaryHandlesInfoKHR;
  };

  // wrapper struct for struct VkPipelineBinaryInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineBinaryInfoKHR.html
  // Input structure: a read-only list of pipeline binaries, typically chained
  // into a pipeline create info via pNext.
  struct PipelineBinaryInfoKHR
  {
    using NativeType = VkPipelineBinaryInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineBinaryInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PipelineBinaryInfoKHR( uint32_t binaryCount_ = {}, const PipelineBinaryKHR * pPipelineBinaries_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , binaryCount{ binaryCount_ }
      , pPipelineBinaries{ pPipelineBinaries_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineBinaryInfoKHR( PipelineBinaryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (layout-compatible member-wise copy).
    PipelineBinaryInfoKHR( VkPipelineBinaryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineBinaryInfoKHR( *reinterpret_cast<PipelineBinaryInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // ArrayProxy constructor: derives binaryCount from the proxy size.
    PipelineBinaryInfoKHR( ArrayProxyNoTemporaries<const PipelineBinaryKHR> const & pipelineBinaries_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), binaryCount( static_cast<uint32_t>( pipelineBinaries_.size() ) ), pPipelineBinaries( pipelineBinaries_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineBinaryInfoKHR & operator=( PipelineBinaryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PipelineBinaryInfoKHR & operator=( VkPipelineBinaryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineBinaryInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this for builder-style use.
    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryInfoKHR & setBinaryCount( uint32_t binaryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      binaryCount = binaryCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineBinaryInfoKHR & setPPipelineBinaries( const PipelineBinaryKHR * pPipelineBinaries_ ) VULKAN_HPP_NOEXCEPT
    {
      pPipelineBinaries = pPipelineBinaries_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Proxy setter: updates count and pointer together.
    PipelineBinaryInfoKHR & setPipelineBinaries( ArrayProxyNoTemporaries<const PipelineBinaryKHR> const & pipelineBinaries_ ) VULKAN_HPP_NOEXCEPT
    {
      binaryCount       = static_cast<uint32_t>( pipelineBinaries_.size() );
      pPipelineBinaries = pipelineBinaries_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (reference and pointer forms).
    operator VkPipelineBinaryInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineBinaryInfoKHR *>( this );
    }

    operator VkPipelineBinaryInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineBinaryInfoKHR *>( this );
    }

    operator VkPipelineBinaryInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineBinaryInfoKHR *>( this );
    }

    operator VkPipelineBinaryInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineBinaryInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of references, for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const PipelineBinaryKHR * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, binaryCount, pPipelineBinaries );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineBinaryInfoKHR const & ) const = default;
#else
    // Member-wise equality; the binaries array is compared by pointer address.
    bool operator==( PipelineBinaryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( binaryCount == rhs.binaryCount ) && ( pPipelineBinaries == rhs.pPipelineBinaries );
#  endif
    }

    bool operator!=( PipelineBinaryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType             sType             = StructureType::ePipelineBinaryInfoKHR;
    const void *              pNext             = {};
    uint32_t                  binaryCount       = {};
    const PipelineBinaryKHR * pPipelineBinaries = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkPipelineBinaryInfoKHR>
  {
    using Type = PipelineBinaryInfoKHR;
  };
#endif

  // Maps the sType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePipelineBinaryInfoKHR>
  {
    using Type = PipelineBinaryInfoKHR;
  };

  // wrapper struct for struct VkPipelineCacheCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCacheCreateInfo.html
  struct PipelineCacheCreateInfo
  {
    using NativeType = VkPipelineCacheCreateInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineCacheCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( PipelineCacheCreateFlags flags_           = {},
                                                  size_t                   initialDataSize_ = {},
                                                  const void *             pInitialData_    = {},
                                                  const void *             pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , initialDataSize{ initialDataSize_ }
      , pInitialData{ pInitialData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCacheCreateInfo( *reinterpret_cast<PipelineCacheCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    template <typename T>
    PipelineCacheCreateInfo( PipelineCacheCreateFlags flags_, ArrayProxyNoTemporaries<const T> const & initialData_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), flags( flags_ ), initialDataSize( initialData_.size() * sizeof( T ) ), pInitialData( initialData_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineCacheCreateInfo & operator=( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PipelineCacheCreateInfo & operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineCacheCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setFlags( PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
    {
      initialDataSize = initialDataSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT
    {
      pInitialData = pInitialData_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets both initialDataSize (in bytes) and pInitialData
    // from a single typed, non-owning array proxy.
    template <typename T>
    PipelineCacheCreateInfo & setInitialData( ArrayProxyNoTemporaries<const T> const & initialData_ ) VULKAN_HPP_NOEXCEPT
    {
      initialDataSize = initialData_.size() * sizeof( T );
      pInitialData    = initialData_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the underlying C struct (by reference and by
    // pointer); these reinterpret_casts assume the wrapper and
    // VkPipelineCacheCreateInfo have identical memory layout.
    operator VkPipelineCacheCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCacheCreateInfo *>( this );
    }

    operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCacheCreateInfo *>( this );
    }

    operator VkPipelineCacheCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineCacheCreateInfo *>( this );
    }

    operator VkPipelineCacheCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineCacheCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, PipelineCacheCreateFlags const &, size_t const &, const void * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, initialDataSize, pInitialData );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineCacheCreateInfo const & ) const = default;
#else
    // Member-wise equality; note that pNext and pInitialData are compared as raw
    // pointer values (address identity), not by the data they point to.
    bool operator==( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( initialDataSize == rhs.initialDataSize ) &&
             ( pInitialData == rhs.pInitialData );
#  endif
    }

    bool operator!=( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPipelineCacheCreateInfo; do not reorder.
    StructureType            sType           = StructureType::ePipelineCacheCreateInfo;
    const void *             pNext           = {};
    PipelineCacheCreateFlags flags           = {};
    size_t                   initialDataSize = {};
    const void *             pInitialData    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type VkPipelineCacheCreateInfo to its C++ wrapper (C++20 and up).
  template <>
  struct CppType<VkPipelineCacheCreateInfo>
  {
    using Type = PipelineCacheCreateInfo;
  };
#endif

  // Maps StructureType::ePipelineCacheCreateInfo to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePipelineCacheCreateInfo>
  {
    using Type = PipelineCacheCreateInfo;
  };

  // wrapper struct for struct VkPipelineCacheHeaderVersionDataGraphQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCacheHeaderVersionDataGraphQCOM.html
  // Unlike most Vulkan structs, this one carries no sType/pNext members: it
  // describes a cache header data layout rather than an extensible input struct.
  struct PipelineCacheHeaderVersionDataGraphQCOM
  {
    using NativeType = VkPipelineCacheHeaderVersionDataGraphQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionDataGraphQCOM(
      uint32_t                                                                        headerSize_       = {},
      PipelineCacheHeaderVersion                                                      headerVersion_    = PipelineCacheHeaderVersion::eOne,
      DataGraphModelCacheTypeQCOM                                                     cacheType_        = DataGraphModelCacheTypeQCOM::eGenericBinary,
      uint32_t                                                                        cacheVersion_     = {},
      std::array<uint32_t, VK_DATA_GRAPH_MODEL_TOOLCHAIN_VERSION_LENGTH_QCOM> const & toolchainVersion_ = {} ) VULKAN_HPP_NOEXCEPT
      : headerSize{ headerSize_ }
      , headerVersion{ headerVersion_ }
      , cacheType{ cacheType_ }
      , cacheVersion{ cacheVersion_ }
      , toolchainVersion{ toolchainVersion_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionDataGraphQCOM( PipelineCacheHeaderVersionDataGraphQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility between the
    // wrapper and VkPipelineCacheHeaderVersionDataGraphQCOM.
    PipelineCacheHeaderVersionDataGraphQCOM( VkPipelineCacheHeaderVersionDataGraphQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCacheHeaderVersionDataGraphQCOM( *reinterpret_cast<PipelineCacheHeaderVersionDataGraphQCOM const *>( &rhs ) )
    {
    }

    PipelineCacheHeaderVersionDataGraphQCOM & operator=( PipelineCacheHeaderVersionDataGraphQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PipelineCacheHeaderVersionDataGraphQCOM & operator=( VkPipelineCacheHeaderVersionDataGraphQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineCacheHeaderVersionDataGraphQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionDataGraphQCOM & setHeaderSize( uint32_t headerSize_ ) VULKAN_HPP_NOEXCEPT
    {
      headerSize = headerSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionDataGraphQCOM & setHeaderVersion( PipelineCacheHeaderVersion headerVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      headerVersion = headerVersion_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionDataGraphQCOM & setCacheType( DataGraphModelCacheTypeQCOM cacheType_ ) VULKAN_HPP_NOEXCEPT
    {
      cacheType = cacheType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionDataGraphQCOM & setCacheVersion( uint32_t cacheVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      cacheVersion = cacheVersion_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionDataGraphQCOM &
      setToolchainVersion( std::array<uint32_t, VK_DATA_GRAPH_MODEL_TOOLCHAIN_VERSION_LENGTH_QCOM> toolchainVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      toolchainVersion = toolchainVersion_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the underlying C struct; the reinterpret_casts
    // assume identical memory layout of wrapper and C struct.
    operator VkPipelineCacheHeaderVersionDataGraphQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCacheHeaderVersionDataGraphQCOM *>( this );
    }

    operator VkPipelineCacheHeaderVersionDataGraphQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCacheHeaderVersionDataGraphQCOM *>( this );
    }

    operator VkPipelineCacheHeaderVersionDataGraphQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineCacheHeaderVersionDataGraphQCOM *>( this );
    }

    operator VkPipelineCacheHeaderVersionDataGraphQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineCacheHeaderVersionDataGraphQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::tuple<uint32_t const &,
               PipelineCacheHeaderVersion const &,
               DataGraphModelCacheTypeQCOM const &,
               uint32_t const &,
               ArrayWrapper1D<uint32_t, VK_DATA_GRAPH_MODEL_TOOLCHAIN_VERSION_LENGTH_QCOM> const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( headerSize, headerVersion, cacheType, cacheVersion, toolchainVersion );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineCacheHeaderVersionDataGraphQCOM const & ) const = default;
#else
    // Member-wise equality over all members.
    bool operator==( PipelineCacheHeaderVersionDataGraphQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( headerSize == rhs.headerSize ) && ( headerVersion == rhs.headerVersion ) && ( cacheType == rhs.cacheType ) &&
             ( cacheVersion == rhs.cacheVersion ) && ( toolchainVersion == rhs.toolchainVersion );
#  endif
    }

    bool operator!=( PipelineCacheHeaderVersionDataGraphQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror the C struct; do not reorder.
    uint32_t                                                                    headerSize       = {};
    PipelineCacheHeaderVersion                                                  headerVersion    = PipelineCacheHeaderVersion::eOne;
    DataGraphModelCacheTypeQCOM                                                 cacheType        = DataGraphModelCacheTypeQCOM::eGenericBinary;
    uint32_t                                                                    cacheVersion     = {};
    ArrayWrapper1D<uint32_t, VK_DATA_GRAPH_MODEL_TOOLCHAIN_VERSION_LENGTH_QCOM> toolchainVersion = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to its C++ wrapper (C++20 and up). No StructureType mapping
  // exists for this struct since it has no sType member.
  template <>
  struct CppType<VkPipelineCacheHeaderVersionDataGraphQCOM>
  {
    using Type = PipelineCacheHeaderVersionDataGraphQCOM;
  };
#endif

  // wrapper struct for struct VkPipelineCacheHeaderVersionOne, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCacheHeaderVersionOne.html
  // Describes the version-one pipeline cache header layout; like the QCOM header
  // struct above, it has no sType/pNext members.
  struct PipelineCacheHeaderVersionOne
  {
    using NativeType = VkPipelineCacheHeaderVersionOne;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne( uint32_t                                  headerSize_        = {},
                                                           PipelineCacheHeaderVersion                headerVersion_     = PipelineCacheHeaderVersion::eOne,
                                                           uint32_t                                  vendorID_          = {},
                                                           uint32_t                                  deviceID_          = {},
                                                           std::array<uint8_t, VK_UUID_SIZE> const & pipelineCacheUUID_ = {} ) VULKAN_HPP_NOEXCEPT
      : headerSize{ headerSize_ }
      , headerVersion{ headerVersion_ }
      , vendorID{ vendorID_ }
      , deviceID{ deviceID_ }
      , pipelineCacheUUID{ pipelineCacheUUID_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne( PipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility between the
    // wrapper and VkPipelineCacheHeaderVersionOne.
    PipelineCacheHeaderVersionOne( VkPipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCacheHeaderVersionOne( *reinterpret_cast<PipelineCacheHeaderVersionOne const *>( &rhs ) )
    {
    }

    PipelineCacheHeaderVersionOne & operator=( PipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PipelineCacheHeaderVersionOne & operator=( VkPipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineCacheHeaderVersionOne const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setHeaderSize( uint32_t headerSize_ ) VULKAN_HPP_NOEXCEPT
    {
      headerSize = headerSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setHeaderVersion( PipelineCacheHeaderVersion headerVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      headerVersion = headerVersion_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setVendorID( uint32_t vendorID_ ) VULKAN_HPP_NOEXCEPT
    {
      vendorID = vendorID_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setDeviceID( uint32_t deviceID_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceID = deviceID_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setPipelineCacheUUID( std::array<uint8_t, VK_UUID_SIZE> pipelineCacheUUID_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineCacheUUID = pipelineCacheUUID_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the underlying C struct; the reinterpret_casts
    // assume identical memory layout of wrapper and C struct.
    operator VkPipelineCacheHeaderVersionOne const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCacheHeaderVersionOne *>( this );
    }

    operator VkPipelineCacheHeaderVersionOne &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCacheHeaderVersionOne *>( this );
    }

    operator VkPipelineCacheHeaderVersionOne const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineCacheHeaderVersionOne *>( this );
    }

    operator VkPipelineCacheHeaderVersionOne *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineCacheHeaderVersionOne *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::tuple<uint32_t const &, PipelineCacheHeaderVersion const &, uint32_t const &, uint32_t const &, ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( headerSize, headerVersion, vendorID, deviceID, pipelineCacheUUID );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineCacheHeaderVersionOne const & ) const = default;
#else
    // Member-wise equality over all members.
    bool operator==( PipelineCacheHeaderVersionOne const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( headerSize == rhs.headerSize ) && ( headerVersion == rhs.headerVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) &&
             ( pipelineCacheUUID == rhs.pipelineCacheUUID );
#  endif
    }

    bool operator!=( PipelineCacheHeaderVersionOne const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror the C struct; do not reorder.
    uint32_t                              headerSize        = {};
    PipelineCacheHeaderVersion            headerVersion     = PipelineCacheHeaderVersion::eOne;
    uint32_t                              vendorID          = {};
    uint32_t                              deviceID          = {};
    ArrayWrapper1D<uint8_t, VK_UUID_SIZE> pipelineCacheUUID = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to its C++ wrapper (C++20 and up). No StructureType mapping
  // exists for this struct since it has no sType member.
  template <>
  struct CppType<VkPipelineCacheHeaderVersionOne>
  {
    using Type = PipelineCacheHeaderVersionOne;
  };
#endif

  // wrapper struct for struct VkPipelineColorBlendAdvancedStateCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineColorBlendAdvancedStateCreateInfoEXT.html
  struct PipelineColorBlendAdvancedStateCreateInfoEXT
  {
    using NativeType = VkPipelineColorBlendAdvancedStateCreateInfoEXT;

    // Generator metadata: this struct may appear at most once in a pNext chain,
    // and sType is fixed to the value below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( Bool32          srcPremultiplied_ = {},
                                                                       Bool32          dstPremultiplied_ = {},
                                                                       BlendOverlapEXT blendOverlap_     = BlendOverlapEXT::eUncorrelated,
                                                                       const void *    pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcPremultiplied{ srcPremultiplied_ }
      , dstPremultiplied{ dstPremultiplied_ }
      , blendOverlap{ blendOverlap_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility between the
    // wrapper and VkPipelineColorBlendAdvancedStateCreateInfoEXT.
    PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineColorBlendAdvancedStateCreateInfoEXT( *reinterpret_cast<PipelineColorBlendAdvancedStateCreateInfoEXT const *>( &rhs ) )
    {
    }

    PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineColorBlendAdvancedStateCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & setSrcPremultiplied( Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT
    {
      srcPremultiplied = srcPremultiplied_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & setDstPremultiplied( Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT
    {
      dstPremultiplied = dstPremultiplied_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & setBlendOverlap( BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT
    {
      blendOverlap = blendOverlap_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the underlying C struct; the reinterpret_casts
    // assume identical memory layout of wrapper and C struct.
    operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineColorBlendAdvancedStateCreateInfoEXT *>( this );
    }

    operator VkPipelineColorBlendAdvancedStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineColorBlendAdvancedStateCreateInfoEXT *>( this );
    }

    operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineColorBlendAdvancedStateCreateInfoEXT *>( this );
    }

    operator VkPipelineColorBlendAdvancedStateCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineColorBlendAdvancedStateCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, Bool32 const &, Bool32 const &, BlendOverlapEXT const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcPremultiplied, dstPremultiplied, blendOverlap );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineColorBlendAdvancedStateCreateInfoEXT const & ) const = default;
#else
    // Member-wise equality; note that pNext is compared as a raw pointer value.
    bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcPremultiplied == rhs.srcPremultiplied ) && ( dstPremultiplied == rhs.dstPremultiplied ) &&
             ( blendOverlap == rhs.blendOverlap );
#  endif
    }

    bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror the C struct; do not reorder.
    StructureType   sType            = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;
    const void *    pNext            = {};
    Bool32          srcPremultiplied = {};
    Bool32          dstPremultiplied = {};
    BlendOverlapEXT blendOverlap     = BlendOverlapEXT::eUncorrelated;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to its C++ wrapper (C++20 and up).
  template <>
  struct CppType<VkPipelineColorBlendAdvancedStateCreateInfoEXT>
  {
    using Type = PipelineColorBlendAdvancedStateCreateInfoEXT;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT>
  {
    using Type = PipelineColorBlendAdvancedStateCreateInfoEXT;
  };

  // wrapper struct for struct VkPipelineColorWriteCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineColorWriteCreateInfoEXT.html
  struct PipelineColorWriteCreateInfoEXT
  {
    using NativeType = VkPipelineColorWriteCreateInfoEXT;

    // Generator metadata: this struct may appear at most once in a pNext chain,
    // and sType is fixed to the value below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineColorWriteCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT( uint32_t       attachmentCount_    = {},
                                                          const Bool32 * pColorWriteEnables_ = {},
                                                          const void *   pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , attachmentCount{ attachmentCount_ }
      , pColorWriteEnables{ pColorWriteEnables_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility between the
    // wrapper and VkPipelineColorWriteCreateInfoEXT.
    PipelineColorWriteCreateInfoEXT( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineColorWriteCreateInfoEXT( *reinterpret_cast<PipelineColorWriteCreateInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode convenience constructor: derives attachmentCount and
    // pColorWriteEnables from a non-owning array proxy; the caller must keep the
    // data alive.
    PipelineColorWriteCreateInfoEXT( ArrayProxyNoTemporaries<const Bool32> const & colorWriteEnables_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), attachmentCount( static_cast<uint32_t>( colorWriteEnables_.size() ) ), pColorWriteEnables( colorWriteEnables_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineColorWriteCreateInfoEXT & operator=( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PipelineColorWriteCreateInfoEXT & operator=( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineColorWriteCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = attachmentCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & setPColorWriteEnables( const Bool32 * pColorWriteEnables_ ) VULKAN_HPP_NOEXCEPT
    {
      pColorWriteEnables = pColorWriteEnables_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets attachmentCount and pColorWriteEnables together
    // from a single non-owning array proxy.
    PipelineColorWriteCreateInfoEXT & setColorWriteEnables( ArrayProxyNoTemporaries<const Bool32> const & colorWriteEnables_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount    = static_cast<uint32_t>( colorWriteEnables_.size() );
      pColorWriteEnables = colorWriteEnables_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the underlying C struct; the reinterpret_casts
    // assume identical memory layout of wrapper and C struct.
    operator VkPipelineColorWriteCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineColorWriteCreateInfoEXT *>( this );
    }

    operator VkPipelineColorWriteCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineColorWriteCreateInfoEXT *>( this );
    }

    operator VkPipelineColorWriteCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineColorWriteCreateInfoEXT *>( this );
    }

    operator VkPipelineColorWriteCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineColorWriteCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const Bool32 * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, attachmentCount, pColorWriteEnables );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineColorWriteCreateInfoEXT const & ) const = default;
#else
    // Member-wise equality; note that pNext and pColorWriteEnables are compared
    // as raw pointer values (address identity), not by the pointed-to data.
    bool operator==( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentCount == rhs.attachmentCount ) && ( pColorWriteEnables == rhs.pColorWriteEnables );
#  endif
    }

    bool operator!=( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror the C struct; do not reorder.
    StructureType  sType              = StructureType::ePipelineColorWriteCreateInfoEXT;
    const void *   pNext              = {};
    uint32_t       attachmentCount    = {};
    const Bool32 * pColorWriteEnables = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to its C++ wrapper (C++20 and up).
  template <>
  struct CppType<VkPipelineColorWriteCreateInfoEXT>
  {
    using Type = PipelineColorWriteCreateInfoEXT;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePipelineColorWriteCreateInfoEXT>
  {
    using Type = PipelineColorWriteCreateInfoEXT;
  };

  // wrapper struct for struct VkPipelineCompilerControlCreateInfoAMD, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCompilerControlCreateInfoAMD.html
  struct PipelineCompilerControlCreateInfoAMD
  {
    using NativeType = VkPipelineCompilerControlCreateInfoAMD;

    // Generator metadata: this struct may appear at most once in a pNext chain,
    // and sType is fixed to the value below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineCompilerControlCreateInfoAMD;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( PipelineCompilerControlFlagsAMD compilerControlFlags_ = {},
                                                               const void *                    pNext_                = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , compilerControlFlags{ compilerControlFlags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility between the
    // wrapper and VkPipelineCompilerControlCreateInfoAMD.
    PipelineCompilerControlCreateInfoAMD( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCompilerControlCreateInfoAMD( *reinterpret_cast<PipelineCompilerControlCreateInfoAMD const *>( &rhs ) )
    {
    }

    PipelineCompilerControlCreateInfoAMD & operator=( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PipelineCompilerControlCreateInfoAMD & operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineCompilerControlCreateInfoAMD const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD &
      setCompilerControlFlags( PipelineCompilerControlFlagsAMD compilerControlFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      compilerControlFlags = compilerControlFlags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the underlying C struct; the reinterpret_casts
    // assume identical memory layout of wrapper and C struct.
    operator VkPipelineCompilerControlCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCompilerControlCreateInfoAMD *>( this );
    }

    operator VkPipelineCompilerControlCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCompilerControlCreateInfoAMD *>( this );
    }

    operator VkPipelineCompilerControlCreateInfoAMD const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineCompilerControlCreateInfoAMD *>( this );
    }

    operator VkPipelineCompilerControlCreateInfoAMD *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineCompilerControlCreateInfoAMD *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, PipelineCompilerControlFlagsAMD const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, compilerControlFlags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineCompilerControlCreateInfoAMD const & ) const = default;
#else
    // Member-wise equality; note that pNext is compared as a raw pointer value.
    bool operator==( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compilerControlFlags == rhs.compilerControlFlags );
#  endif
    }

    bool operator!=( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror the C struct; do not reorder.
    StructureType                   sType                = StructureType::ePipelineCompilerControlCreateInfoAMD;
    const void *                    pNext                = {};
    PipelineCompilerControlFlagsAMD compilerControlFlags = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C type to its C++ wrapper (C++20 and up).
  template <>
  struct CppType<VkPipelineCompilerControlCreateInfoAMD>
  {
    using Type = PipelineCompilerControlCreateInfoAMD;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePipelineCompilerControlCreateInfoAMD>
  {
    using Type = PipelineCompilerControlCreateInfoAMD;
  };

  // wrapper struct for struct VkPipelineCoverageModulationStateCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCoverageModulationStateCreateInfoNV.html
  struct PipelineCoverageModulationStateCreateInfoNV
  {
    using NativeType = VkPipelineCoverageModulationStateCreateInfoNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineCoverageModulationStateCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( PipelineCoverageModulationStateCreateFlagsNV flags_     = {},
                                                                      CoverageModulationModeNV coverageModulationMode_        = CoverageModulationModeNV::eNone,
                                                                      Bool32                   coverageModulationTableEnable_ = {},
                                                                      uint32_t                 coverageModulationTableCount_  = {},
                                                                      const float *            pCoverageModulationTable_      = {},
                                                                      const void *             pNext_                         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , coverageModulationMode{ coverageModulationMode_ }
      , coverageModulationTableEnable{ coverageModulationTableEnable_ }
      , coverageModulationTableCount{ coverageModulationTableCount_ }
      , pCoverageModulationTable{ pCoverageModulationTable_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-identical to VkPipelineCoverageModulationStateCreateInfoNV.
    PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCoverageModulationStateCreateInfoNV( *reinterpret_cast<PipelineCoverageModulationStateCreateInfoNV const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: coverageModulationTableCount and pCoverageModulationTable are both
    // derived from a single ArrayProxy, so count and pointer cannot get out of sync.
    PipelineCoverageModulationStateCreateInfoNV( PipelineCoverageModulationStateCreateFlagsNV flags_,
                                                 CoverageModulationModeNV                     coverageModulationMode_,
                                                 Bool32                                       coverageModulationTableEnable_,
                                                 ArrayProxyNoTemporaries<const float> const & coverageModulationTable_,
                                                 const void *                                 pNext_ = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , coverageModulationMode( coverageModulationMode_ )
      , coverageModulationTableEnable( coverageModulationTableEnable_ )
      , coverageModulationTableCount( static_cast<uint32_t>( coverageModulationTable_.size() ) )
      , pCoverageModulationTable( coverageModulationTable_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineCoverageModulationStateCreateInfoNV & operator=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpret as the converting constructor).
    PipelineCoverageModulationStateCreateInfoNV & operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineCoverageModulationStateCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setFlags( PipelineCoverageModulationStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV &
      setCoverageModulationMode( CoverageModulationModeNV coverageModulationMode_ ) VULKAN_HPP_NOEXCEPT
    {
      coverageModulationMode = coverageModulationMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV &
      setCoverageModulationTableEnable( Bool32 coverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      coverageModulationTableEnable = coverageModulationTableEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV &
      setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ ) VULKAN_HPP_NOEXCEPT
    {
      coverageModulationTableCount = coverageModulationTableCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV &
      setPCoverageModulationTable( const float * pCoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT
    {
      pCoverageModulationTable = pCoverageModulationTable_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: updates count and pointer together from one ArrayProxy, keeping them consistent.
    PipelineCoverageModulationStateCreateInfoNV &
      setCoverageModulationTable( ArrayProxyNoTemporaries<const float> const & coverageModulationTable_ ) VULKAN_HPP_NOEXCEPT
    {
      coverageModulationTableCount = static_cast<uint32_t>( coverageModulationTable_.size() );
      pCoverageModulationTable     = coverageModulationTable_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C type (reference and pointer, const and non-const),
    // implemented via reinterpret_cast of this layout-identical wrapper.
    operator VkPipelineCoverageModulationStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCoverageModulationStateCreateInfoNV *>( this );
    }

    operator VkPipelineCoverageModulationStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCoverageModulationStateCreateInfoNV *>( this );
    }

    operator VkPipelineCoverageModulationStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineCoverageModulationStateCreateInfoNV *>( this );
    }

    operator VkPipelineCoverageModulationStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineCoverageModulationStateCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to every member (sType first); used by the reflection-based operator==.
    std::tuple<StructureType const &,
               const void * const &,
               PipelineCoverageModulationStateCreateFlagsNV const &,
               CoverageModulationModeNV const &,
               Bool32 const &,
               uint32_t const &,
               const float * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, coverageModulationMode, coverageModulationTableEnable, coverageModulationTableCount, pCoverageModulationTable );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineCoverageModulationStateCreateInfoNV const & ) const = default;
#else
    // Memberwise equality; note pCoverageModulationTable is compared by pointer, not by table contents.
    bool operator==( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( coverageModulationMode == rhs.coverageModulationMode ) &&
             ( coverageModulationTableEnable == rhs.coverageModulationTableEnable ) && ( coverageModulationTableCount == rhs.coverageModulationTableCount ) &&
             ( pCoverageModulationTable == rhs.pCoverageModulationTable );
#  endif
    }

    bool operator!=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and defaults mirror VkPipelineCoverageModulationStateCreateInfoNV; do not reorder.
    StructureType                                sType                         = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
    const void *                                 pNext                         = {};
    PipelineCoverageModulationStateCreateFlagsNV flags                         = {};
    CoverageModulationModeNV                     coverageModulationMode        = CoverageModulationModeNV::eNone;
    Bool32                                       coverageModulationTableEnable = {};
    uint32_t                                     coverageModulationTableCount  = {};
    const float *                                pCoverageModulationTable      = {};
  };

// Trait mappings from the native C type / StructureType enum value to the C++ wrapper type.
// The native-type mapping is only provided for C++20 and newer.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPipelineCoverageModulationStateCreateInfoNV>
  {
    using Type = PipelineCoverageModulationStateCreateInfoNV;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePipelineCoverageModulationStateCreateInfoNV>
  {
    using Type = PipelineCoverageModulationStateCreateInfoNV;
  };

  // wrapper struct for struct VkPipelineCoverageReductionStateCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCoverageReductionStateCreateInfoNV.html
  struct PipelineCoverageReductionStateCreateInfoNV
  {
    using NativeType = VkPipelineCoverageReductionStateCreateInfoNV;

    // Chain metadata: this struct may appear at most once in a pNext chain, with the sType below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineCoverageReductionStateCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every parameter has a default so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( PipelineCoverageReductionStateCreateFlagsNV flags_ = {},
                                                                     CoverageReductionModeNV coverageReductionMode_     = CoverageReductionModeNV::eMerge,
                                                                     const void *            pNext_                     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , coverageReductionMode{ coverageReductionMode_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-identical to it.
    PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCoverageReductionStateCreateInfoNV( *reinterpret_cast<PipelineCoverageReductionStateCreateInfoNV const *>( &rhs ) )
    {
    }

    PipelineCoverageReductionStateCreateInfoNV & operator=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpret as the converting constructor).
    PipelineCoverageReductionStateCreateInfoNV & operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineCoverageReductionStateCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & setFlags( PipelineCoverageReductionStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV &
      setCoverageReductionMode( CoverageReductionModeNV coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
    {
      coverageReductionMode = coverageReductionMode_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C type, via reinterpret_cast of this layout-identical wrapper.
    operator VkPipelineCoverageReductionStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCoverageReductionStateCreateInfoNV *>( this );
    }

    operator VkPipelineCoverageReductionStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCoverageReductionStateCreateInfoNV *>( this );
    }

    operator VkPipelineCoverageReductionStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineCoverageReductionStateCreateInfoNV *>( this );
    }

    operator VkPipelineCoverageReductionStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineCoverageReductionStateCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to every member (sType first); used by the reflection-based operator==.
    std::tuple<StructureType const &, const void * const &, PipelineCoverageReductionStateCreateFlagsNV const &, CoverageReductionModeNV const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, coverageReductionMode );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineCoverageReductionStateCreateInfoNV const & ) const = default;
#else
    // Memberwise equality (pNext compared by pointer, as in the defaulted <=> case).
    bool operator==( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( coverageReductionMode == rhs.coverageReductionMode );
#  endif
    }

    bool operator!=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and defaults mirror VkPipelineCoverageReductionStateCreateInfoNV; do not reorder.
    StructureType                               sType                 = StructureType::ePipelineCoverageReductionStateCreateInfoNV;
    const void *                                pNext                 = {};
    PipelineCoverageReductionStateCreateFlagsNV flags                 = {};
    CoverageReductionModeNV                     coverageReductionMode = CoverageReductionModeNV::eMerge;
  };

// Trait mappings from the native C type / StructureType enum value to the C++ wrapper type.
// The native-type mapping is only provided for C++20 and newer.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPipelineCoverageReductionStateCreateInfoNV>
  {
    using Type = PipelineCoverageReductionStateCreateInfoNV;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePipelineCoverageReductionStateCreateInfoNV>
  {
    using Type = PipelineCoverageReductionStateCreateInfoNV;
  };

  // wrapper struct for struct VkPipelineCoverageToColorStateCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCoverageToColorStateCreateInfoNV.html
  struct PipelineCoverageToColorStateCreateInfoNV
  {
    using NativeType = VkPipelineCoverageToColorStateCreateInfoNV;

    // Chain metadata: this struct may appear at most once in a pNext chain, with the sType below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineCoverageToColorStateCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every parameter has a default so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( PipelineCoverageToColorStateCreateFlagsNV flags_                   = {},
                                                                   Bool32                                    coverageToColorEnable_   = {},
                                                                   uint32_t                                  coverageToColorLocation_ = {},
                                                                   const void *                              pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , coverageToColorEnable{ coverageToColorEnable_ }
      , coverageToColorLocation{ coverageToColorLocation_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-identical to it.
    PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCoverageToColorStateCreateInfoNV( *reinterpret_cast<PipelineCoverageToColorStateCreateInfoNV const *>( &rhs ) )
    {
    }

    PipelineCoverageToColorStateCreateInfoNV & operator=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpret as the converting constructor).
    PipelineCoverageToColorStateCreateInfoNV & operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineCoverageToColorStateCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setFlags( PipelineCoverageToColorStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorEnable( Bool32 coverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      coverageToColorEnable = coverageToColorEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorLocation( uint32_t coverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT
    {
      coverageToColorLocation = coverageToColorLocation_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C type, via reinterpret_cast of this layout-identical wrapper.
    operator VkPipelineCoverageToColorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCoverageToColorStateCreateInfoNV *>( this );
    }

    operator VkPipelineCoverageToColorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCoverageToColorStateCreateInfoNV *>( this );
    }

    operator VkPipelineCoverageToColorStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineCoverageToColorStateCreateInfoNV *>( this );
    }

    operator VkPipelineCoverageToColorStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineCoverageToColorStateCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to every member (sType first); used by the reflection-based operator==.
    std::tuple<StructureType const &, const void * const &, PipelineCoverageToColorStateCreateFlagsNV const &, Bool32 const &, uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, coverageToColorEnable, coverageToColorLocation );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineCoverageToColorStateCreateInfoNV const & ) const = default;
#else
    // Memberwise equality (pNext compared by pointer, as in the defaulted <=> case).
    bool operator==( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( coverageToColorEnable == rhs.coverageToColorEnable ) &&
             ( coverageToColorLocation == rhs.coverageToColorLocation );
#  endif
    }

    bool operator!=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and defaults mirror VkPipelineCoverageToColorStateCreateInfoNV; do not reorder.
    StructureType                             sType                   = StructureType::ePipelineCoverageToColorStateCreateInfoNV;
    const void *                              pNext                   = {};
    PipelineCoverageToColorStateCreateFlagsNV flags                   = {};
    Bool32                                    coverageToColorEnable   = {};
    uint32_t                                  coverageToColorLocation = {};
  };

// Trait mappings from the native C type / StructureType enum value to the C++ wrapper type.
// The native-type mapping is only provided for C++20 and newer.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPipelineCoverageToColorStateCreateInfoNV>
  {
    using Type = PipelineCoverageToColorStateCreateInfoNV;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePipelineCoverageToColorStateCreateInfoNV>
  {
    using Type = PipelineCoverageToColorStateCreateInfoNV;
  };

  // wrapper struct for struct VkPipelineCreateFlags2CreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCreateFlags2CreateInfo.html
  struct PipelineCreateFlags2CreateInfo
  {
    using NativeType = VkPipelineCreateFlags2CreateInfo;

    // Chain metadata: this struct may appear at most once in a pNext chain, with the sType below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineCreateFlags2CreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every parameter has a default so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR PipelineCreateFlags2CreateInfo( PipelineCreateFlags2 flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineCreateFlags2CreateInfo( PipelineCreateFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-identical to it.
    PipelineCreateFlags2CreateInfo( VkPipelineCreateFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCreateFlags2CreateInfo( *reinterpret_cast<PipelineCreateFlags2CreateInfo const *>( &rhs ) )
    {
    }

    PipelineCreateFlags2CreateInfo & operator=( PipelineCreateFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpret as the converting constructor).
    PipelineCreateFlags2CreateInfo & operator=( VkPipelineCreateFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineCreateFlags2CreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 PipelineCreateFlags2CreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCreateFlags2CreateInfo & setFlags( PipelineCreateFlags2 flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C type, via reinterpret_cast of this layout-identical wrapper.
    operator VkPipelineCreateFlags2CreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCreateFlags2CreateInfo *>( this );
    }

    operator VkPipelineCreateFlags2CreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCreateFlags2CreateInfo *>( this );
    }

    operator VkPipelineCreateFlags2CreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineCreateFlags2CreateInfo *>( this );
    }

    operator VkPipelineCreateFlags2CreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineCreateFlags2CreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to every member (sType first); used by the reflection-based operator==.
    std::tuple<StructureType const &, const void * const &, PipelineCreateFlags2 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineCreateFlags2CreateInfo const & ) const = default;
#else
    // Memberwise equality (pNext compared by pointer, as in the defaulted <=> case).
    bool operator==( PipelineCreateFlags2CreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
#  endif
    }

    bool operator!=( PipelineCreateFlags2CreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and defaults mirror VkPipelineCreateFlags2CreateInfo; do not reorder.
    StructureType        sType = StructureType::ePipelineCreateFlags2CreateInfo;
    const void *         pNext = {};
    PipelineCreateFlags2 flags = {};
  };

// Trait mappings from the native C type / StructureType enum value to the C++ wrapper type.
// The native-type mapping is only provided for C++20 and newer.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPipelineCreateFlags2CreateInfo>
  {
    using Type = PipelineCreateFlags2CreateInfo;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePipelineCreateFlags2CreateInfo>
  {
    using Type = PipelineCreateFlags2CreateInfo;
  };

  // KHR-suffixed name retained as an alias for backward compatibility with the extension spelling.
  using PipelineCreateFlags2CreateInfoKHR = PipelineCreateFlags2CreateInfo;

  // wrapper struct for struct VkPipelineCreationFeedback, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCreationFeedback.html
  struct PipelineCreationFeedback
  {
    using NativeType = VkPipelineCreationFeedback;

    // Note: unlike the CreateInfo wrappers, this struct has no sType/pNext members and no
    // setters; it is a plain data struct mirroring VkPipelineCreationFeedback.
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; both parameters default, so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR PipelineCreationFeedback( PipelineCreationFeedbackFlags flags_ = {}, uint64_t duration_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags{ flags_ }
      , duration{ duration_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineCreationFeedback( PipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-identical to it.
    PipelineCreationFeedback( VkPipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCreationFeedback( *reinterpret_cast<PipelineCreationFeedback const *>( &rhs ) )
    {
    }

    PipelineCreationFeedback & operator=( PipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpret as the converting constructor).
    PipelineCreationFeedback & operator=( VkPipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineCreationFeedback const *>( &rhs );
      return *this;
    }

    // Zero-cost conversions to the native C type, via reinterpret_cast of this layout-identical wrapper.
    operator VkPipelineCreationFeedback const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCreationFeedback *>( this );
    }

    operator VkPipelineCreationFeedback &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCreationFeedback *>( this );
    }

    operator VkPipelineCreationFeedback const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineCreationFeedback *>( this );
    }

    operator VkPipelineCreationFeedback *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineCreationFeedback *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to every member; used by the reflection-based operator==.
    std::tuple<PipelineCreationFeedbackFlags const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( flags, duration );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineCreationFeedback const & ) const = default;
#else
    // Memberwise equality.
    bool operator==( PipelineCreationFeedback const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( flags == rhs.flags ) && ( duration == rhs.duration );
#  endif
    }

    bool operator!=( PipelineCreationFeedback const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkPipelineCreationFeedback; do not reorder.
    PipelineCreationFeedbackFlags flags    = {};
    uint64_t                      duration = {};
  };

// Trait mapping from the native C type to the C++ wrapper type; only provided for C++20 and newer.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPipelineCreationFeedback>
  {
    using Type = PipelineCreationFeedback;
  };
#endif
  // EXT-suffixed name retained as an alias for backward compatibility with the extension spelling.
  using PipelineCreationFeedbackEXT = PipelineCreationFeedback;

  // wrapper struct for struct VkPipelineCreationFeedbackCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCreationFeedbackCreateInfo.html
  struct PipelineCreationFeedbackCreateInfo
  {
    using NativeType = VkPipelineCreationFeedbackCreateInfo;

    // Chain metadata: this struct may appear at most once in a pNext chain, with the sType below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineCreationFeedbackCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every parameter has a default so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo( PipelineCreationFeedback * pPipelineCreationFeedback_          = {},
                                                             uint32_t                   pipelineStageCreationFeedbackCount_ = {},
                                                             PipelineCreationFeedback * pPipelineStageCreationFeedbacks_    = {},
                                                             const void *               pNext_                              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pPipelineCreationFeedback{ pPipelineCreationFeedback_ }
      , pipelineStageCreationFeedbackCount{ pipelineStageCreationFeedbackCount_ }
      , pPipelineStageCreationFeedbacks{ pPipelineStageCreationFeedbacks_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo( PipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-identical to it.
    PipelineCreationFeedbackCreateInfo( VkPipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCreationFeedbackCreateInfo( *reinterpret_cast<PipelineCreationFeedbackCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: pipelineStageCreationFeedbackCount and pPipelineStageCreationFeedbacks
    // are both derived from a single ArrayProxy, so count and pointer cannot get out of sync.
    PipelineCreationFeedbackCreateInfo( PipelineCreationFeedback *                                pPipelineCreationFeedback_,
                                        ArrayProxyNoTemporaries<PipelineCreationFeedback> const & pipelineStageCreationFeedbacks_,
                                        const void *                                              pNext_ = nullptr )
      : pNext( pNext_ )
      , pPipelineCreationFeedback( pPipelineCreationFeedback_ )
      , pipelineStageCreationFeedbackCount( static_cast<uint32_t>( pipelineStageCreationFeedbacks_.size() ) )
      , pPipelineStageCreationFeedbacks( pipelineStageCreationFeedbacks_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineCreationFeedbackCreateInfo & operator=( PipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpret as the converting constructor).
    PipelineCreationFeedbackCreateInfo & operator=( VkPipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineCreationFeedbackCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo &
      setPPipelineCreationFeedback( PipelineCreationFeedback * pPipelineCreationFeedback_ ) VULKAN_HPP_NOEXCEPT
    {
      pPipelineCreationFeedback = pPipelineCreationFeedback_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo &
      setPipelineStageCreationFeedbackCount( uint32_t pipelineStageCreationFeedbackCount_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineStageCreationFeedbackCount = pipelineStageCreationFeedbackCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo &
      setPPipelineStageCreationFeedbacks( PipelineCreationFeedback * pPipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT
    {
      pPipelineStageCreationFeedbacks = pPipelineStageCreationFeedbacks_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: updates count and pointer together from one ArrayProxy, keeping them consistent.
    PipelineCreationFeedbackCreateInfo &
      setPipelineStageCreationFeedbacks( ArrayProxyNoTemporaries<PipelineCreationFeedback> const & pipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineStageCreationFeedbackCount = static_cast<uint32_t>( pipelineStageCreationFeedbacks_.size() );
      pPipelineStageCreationFeedbacks    = pipelineStageCreationFeedbacks_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C type, via reinterpret_cast of this layout-identical wrapper.
    operator VkPipelineCreationFeedbackCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCreationFeedbackCreateInfo *>( this );
    }

    operator VkPipelineCreationFeedbackCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCreationFeedbackCreateInfo *>( this );
    }

    operator VkPipelineCreationFeedbackCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineCreationFeedbackCreateInfo *>( this );
    }

    operator VkPipelineCreationFeedbackCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineCreationFeedbackCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to every member (sType first); used by the reflection-based operator==.
    std::tuple<StructureType const &, const void * const &, PipelineCreationFeedback * const &, uint32_t const &, PipelineCreationFeedback * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pPipelineCreationFeedback, pipelineStageCreationFeedbackCount, pPipelineStageCreationFeedbacks );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineCreationFeedbackCreateInfo const & ) const = default;
#else
    // Memberwise equality; note the feedback arrays are compared by pointer, not by element contents.
    bool operator==( PipelineCreationFeedbackCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pPipelineCreationFeedback == rhs.pPipelineCreationFeedback ) &&
             ( pipelineStageCreationFeedbackCount == rhs.pipelineStageCreationFeedbackCount ) &&
             ( pPipelineStageCreationFeedbacks == rhs.pPipelineStageCreationFeedbacks );
#  endif
    }

    bool operator!=( PipelineCreationFeedbackCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and defaults mirror VkPipelineCreationFeedbackCreateInfo; do not reorder.
    StructureType              sType                              = StructureType::ePipelineCreationFeedbackCreateInfo;
    const void *               pNext                              = {};
    PipelineCreationFeedback * pPipelineCreationFeedback          = {};
    uint32_t                   pipelineStageCreationFeedbackCount = {};
    PipelineCreationFeedback * pPipelineStageCreationFeedbacks    = {};
  };

// Trait mappings from the native C type / StructureType enum value to the C++ wrapper type.
// The native-type mapping is only provided for C++20 and newer.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPipelineCreationFeedbackCreateInfo>
  {
    using Type = PipelineCreationFeedbackCreateInfo;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePipelineCreationFeedbackCreateInfo>
  {
    using Type = PipelineCreationFeedbackCreateInfo;
  };

  // EXT-suffixed name retained as an alias for backward compatibility with the extension spelling.
  using PipelineCreationFeedbackCreateInfoEXT = PipelineCreationFeedbackCreateInfo;

  // wrapper struct for struct VkPipelineDiscardRectangleStateCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineDiscardRectangleStateCreateInfoEXT.html
  //
  // NOTE: the member order and types below mirror the C struct exactly; the reinterpret_cast
  // conversions in this wrapper rely on that layout compatibility.
  struct PipelineDiscardRectangleStateCreateInfoEXT
  {
    using NativeType = VkPipelineDiscardRectangleStateCreateInfoEXT;

    // This structure may appear at most once in a pNext chain; structureType is the fixed
    // sType value every instance carries.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its default member initializer.
    VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {},
                                                                     DiscardRectangleModeEXT discardRectangleMode_      = DiscardRectangleModeEXT::eInclusive,
                                                                     uint32_t                discardRectangleCount_     = {},
                                                                     const Rect2D *          pDiscardRectangles_        = {},
                                                                     const void *            pNext_                     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , discardRectangleMode{ discardRectangleMode_ }
      , discardRectangleCount{ discardRectangleCount_ }
      , pDiscardRectangles{ pDiscardRectangles_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by delegating to the copy constructor via a
    // layout-compatible reinterpret_cast.
    PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineDiscardRectangleStateCreateInfoEXT( *reinterpret_cast<PipelineDiscardRectangleStateCreateInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode convenience constructor: derives discardRectangleCount and
    // pDiscardRectangles from a single array proxy.
    PipelineDiscardRectangleStateCreateInfoEXT( PipelineDiscardRectangleStateCreateFlagsEXT   flags_,
                                                DiscardRectangleModeEXT                       discardRectangleMode_,
                                                ArrayProxyNoTemporaries<const Rect2D> const & discardRectangles_,
                                                const void *                                  pNext_ = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , discardRectangleMode( discardRectangleMode_ )
      , discardRectangleCount( static_cast<uint32_t>( discardRectangles_.size() ) )
      , pDiscardRectangles( discardRectangles_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineDiscardRectangleStateCreateInfoEXT & operator=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; valid because the two types are layout-compatible.
    PipelineDiscardRectangleStateCreateInfoEXT & operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineDiscardRectangleStateCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setFlags( PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT &
      setDiscardRectangleMode( DiscardRectangleModeEXT discardRectangleMode_ ) VULKAN_HPP_NOEXCEPT
    {
      discardRectangleMode = discardRectangleMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleCount( uint32_t discardRectangleCount_ ) VULKAN_HPP_NOEXCEPT
    {
      discardRectangleCount = discardRectangleCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setPDiscardRectangles( const Rect2D * pDiscardRectangles_ ) VULKAN_HPP_NOEXCEPT
    {
      pDiscardRectangles = pDiscardRectangles_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: updates the count/pointer pair together from one array proxy.
    PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangles( ArrayProxyNoTemporaries<const Rect2D> const & discardRectangles_ ) VULKAN_HPP_NOEXCEPT
    {
      discardRectangleCount = static_cast<uint32_t>( discardRectangles_.size() );
      pDiscardRectangles    = discardRectangles_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (reference and pointer, const and non-const),
    // relying on the layout compatibility noted above.
    operator VkPipelineDiscardRectangleStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineDiscardRectangleStateCreateInfoEXT *>( this );
    }

    operator VkPipelineDiscardRectangleStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineDiscardRectangleStateCreateInfoEXT *>( this );
    }

    operator VkPipelineDiscardRectangleStateCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineDiscardRectangleStateCreateInfoEXT *>( this );
    }

    operator VkPipelineDiscardRectangleStateCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineDiscardRectangleStateCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references over all members, used by the reflection-based
    // comparison path below.
    std::tuple<StructureType const &,
               const void * const &,
               PipelineDiscardRectangleStateCreateFlagsEXT const &,
               DiscardRectangleModeEXT const &,
               uint32_t const &,
               const Rect2D * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, discardRectangleMode, discardRectangleCount, pDiscardRectangles );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineDiscardRectangleStateCreateInfoEXT const & ) const = default;
#else
    // Member-wise equality; note that pNext and pDiscardRectangles are compared by
    // address, not by pointee contents.
    bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( discardRectangleMode == rhs.discardRectangleMode ) &&
             ( discardRectangleCount == rhs.discardRectangleCount ) && ( pDiscardRectangles == rhs.pDiscardRectangles );
#  endif
    }

    bool operator!=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                               sType                 = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
    const void *                                pNext                 = {};
    PipelineDiscardRectangleStateCreateFlagsEXT flags                 = {};
    DiscardRectangleModeEXT                     discardRectangleMode  = DiscardRectangleModeEXT::eInclusive;
    uint32_t                                    discardRectangleCount = {};
    const Rect2D *                              pDiscardRectangles    = {};
  };

  // Maps the C struct type to its C++ wrapper (C++20 and newer only, per the guard below).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPipelineDiscardRectangleStateCreateInfoEXT>
  {
    using Type = PipelineDiscardRectangleStateCreateInfoEXT;
  };
#endif

  // Maps the sType enumerant back to the wrapper type that carries it.
  template <>
  struct CppType<StructureType, StructureType::ePipelineDiscardRectangleStateCreateInfoEXT>
  {
    using Type = PipelineDiscardRectangleStateCreateInfoEXT;
  };

  // wrapper struct for struct VkPipelineExecutableInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineExecutableInfoKHR.html
  //
  // NOTE: member order/types mirror the C struct exactly; the reinterpret_cast
  // conversions below depend on that layout compatibility.
  struct PipelineExecutableInfoKHR
  {
    using NativeType = VkPipelineExecutableInfoKHR;

    // May appear at most once in a pNext chain; structureType is the fixed sType value.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineExecutableInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its default member initializer.
    VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( Pipeline pipeline_ = {}, uint32_t executableIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pipeline{ pipeline_ }
      , executableIndex{ executableIndex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct via a layout-compatible reinterpret_cast.
    PipelineExecutableInfoKHR( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineExecutableInfoKHR( *reinterpret_cast<PipelineExecutableInfoKHR const *>( &rhs ) )
    {
    }

    PipelineExecutableInfoKHR & operator=( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; valid because the two types are layout-compatible.
    PipelineExecutableInfoKHR & operator=( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineExecutableInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setPipeline( Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
    {
      pipeline = pipeline_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setExecutableIndex( uint32_t executableIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      executableIndex = executableIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (reference and pointer, const and non-const).
    operator VkPipelineExecutableInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineExecutableInfoKHR *>( this );
    }

    operator VkPipelineExecutableInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineExecutableInfoKHR *>( this );
    }

    operator VkPipelineExecutableInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineExecutableInfoKHR *>( this );
    }

    operator VkPipelineExecutableInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineExecutableInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references over all members, used by the reflection-based comparison path.
    std::tuple<StructureType const &, const void * const &, Pipeline const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pipeline, executableIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineExecutableInfoKHR const & ) const = default;
#else
    // Member-wise equality; pNext is compared by address, not by pointee contents.
    bool operator==( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline ) && ( executableIndex == rhs.executableIndex );
#  endif
    }

    bool operator!=( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType           = StructureType::ePipelineExecutableInfoKHR;
    const void *  pNext           = {};
    Pipeline      pipeline        = {};
    uint32_t      executableIndex = {};
  };

  // Maps the C struct type to its C++ wrapper (C++20 and newer only, per the guard below).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPipelineExecutableInfoKHR>
  {
    using Type = PipelineExecutableInfoKHR;
  };
#endif

  // Maps the sType enumerant back to the wrapper type that carries it.
  template <>
  struct CppType<StructureType, StructureType::ePipelineExecutableInfoKHR>
  {
    using Type = PipelineExecutableInfoKHR;
  };

  // wrapper struct for struct VkPipelineExecutableInternalRepresentationKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineExecutableInternalRepresentationKHR.html
  //
  // NOTE: member order/types mirror the C struct exactly; the reinterpret_cast
  // conversions below depend on that layout compatibility. No setters are generated
  // for this struct — NOTE(review): presumably because it is filled in by the
  // implementation; verify against the generator if that matters to you.
  struct PipelineExecutableInternalRepresentationKHR
  {
    using NativeType = VkPipelineExecutableInternalRepresentationKHR;

    // May appear at most once in a pNext chain; structureType is the fixed sType value.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineExecutableInternalRepresentationKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; CONSTEXPR_14 because copying the std::array arguments
    // into the ArrayWrapper1D members is not a C++11 constant expression.
    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR( std::array<char, VK_MAX_DESCRIPTION_SIZE> const & name_        = {},
                                                                         std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
                                                                         Bool32                                            isText_      = {},
                                                                         size_t                                            dataSize_    = {},
                                                                         void *                                            pData_       = {},
                                                                         void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , name{ name_ }
      , description{ description_ }
      , isText{ isText_ }
      , dataSize{ dataSize_ }
      , pData{ pData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14
      PipelineExecutableInternalRepresentationKHR( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct via a layout-compatible reinterpret_cast.
    PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineExecutableInternalRepresentationKHR( *reinterpret_cast<PipelineExecutableInternalRepresentationKHR const *>( &rhs ) )
    {
    }

    PipelineExecutableInternalRepresentationKHR & operator=( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; valid because the two types are layout-compatible.
    PipelineExecutableInternalRepresentationKHR & operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineExecutableInternalRepresentationKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C struct (reference and pointer, const and non-const).
    operator VkPipelineExecutableInternalRepresentationKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineExecutableInternalRepresentationKHR *>( this );
    }

    operator VkPipelineExecutableInternalRepresentationKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( this );
    }

    operator VkPipelineExecutableInternalRepresentationKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineExecutableInternalRepresentationKHR *>( this );
    }

    operator VkPipelineExecutableInternalRepresentationKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references over all members, used for reflection-based comparison.
    std::tuple<StructureType const &,
               void * const &,
               ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &,
               ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &,
               Bool32 const &,
               size_t const &,
               void * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, name, description, isText, dataSize, pData );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written three-way comparison: the char-array members are compared with
    // strcmp (assumes NUL-terminated contents, which strcmp requires), and strcmp's
    // int result is mapped onto std::strong_ordering. pNext/pData compare by address.
    std::strong_ordering operator<=>( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = isText <=> rhs.isText; cmp != 0 )
        return cmp;
      if ( auto cmp = dataSize <=> rhs.dataSize; cmp != 0 )
        return cmp;
      if ( auto cmp = pData <=> rhs.pData; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    // Equality mirrors the ordering above: strcmp for the char arrays, and pData
    // compared by pointer value only — NOT by the pointed-to bytes.
    bool operator==( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( strcmp( name, rhs.name ) == 0 ) && ( strcmp( description, rhs.description ) == 0 ) &&
             ( isText == rhs.isText ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData );
    }

    bool operator!=( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    StructureType                                 sType       = StructureType::ePipelineExecutableInternalRepresentationKHR;
    void *                                        pNext       = {};
    ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name        = {};
    ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
    Bool32                                        isText      = {};
    size_t                                        dataSize    = {};
    void *                                        pData       = {};
  };

  // Maps the C struct type to its C++ wrapper (C++20 and newer only, per the guard below).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPipelineExecutableInternalRepresentationKHR>
  {
    using Type = PipelineExecutableInternalRepresentationKHR;
  };
#endif

  // Maps the sType enumerant back to the wrapper type that carries it.
  template <>
  struct CppType<StructureType, StructureType::ePipelineExecutableInternalRepresentationKHR>
  {
    using Type = PipelineExecutableInternalRepresentationKHR;
  };

  // wrapper struct for struct VkPipelineExecutablePropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineExecutablePropertiesKHR.html
  //
  // NOTE: member order/types mirror the C struct exactly; the reinterpret_cast
  // conversions below depend on that layout compatibility. No setters are generated
  // for this struct — NOTE(review): presumably because it is filled in by the
  // implementation; verify against the generator if that matters to you.
  struct PipelineExecutablePropertiesKHR
  {
    using NativeType = VkPipelineExecutablePropertiesKHR;

    // May appear at most once in a pNext chain; structureType is the fixed sType value.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineExecutablePropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; CONSTEXPR_14 because copying the std::array arguments
    // into the ArrayWrapper1D members is not a C++11 constant expression.
    VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR( ShaderStageFlags                                  stages_       = {},
                                                             std::array<char, VK_MAX_DESCRIPTION_SIZE> const & name_         = {},
                                                             std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_  = {},
                                                             uint32_t                                          subgroupSize_ = {},
                                                             void *                                            pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stages{ stages_ }
      , name{ name_ }
      , description{ description_ }
      , subgroupSize{ subgroupSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct via a layout-compatible reinterpret_cast.
    PipelineExecutablePropertiesKHR( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineExecutablePropertiesKHR( *reinterpret_cast<PipelineExecutablePropertiesKHR const *>( &rhs ) )
    {
    }

    PipelineExecutablePropertiesKHR & operator=( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; valid because the two types are layout-compatible.
    PipelineExecutablePropertiesKHR & operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineExecutablePropertiesKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C struct (reference and pointer, const and non-const).
    operator VkPipelineExecutablePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineExecutablePropertiesKHR *>( this );
    }

    operator VkPipelineExecutablePropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( this );
    }

    operator VkPipelineExecutablePropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineExecutablePropertiesKHR *>( this );
    }

    operator VkPipelineExecutablePropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references over all members, used for reflection-based comparison.
    std::tuple<StructureType const &,
               void * const &,
               ShaderStageFlags const &,
               ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &,
               ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stages, name, description, subgroupSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written three-way comparison: the char-array members are compared with
    // strcmp (assumes NUL-terminated contents, which strcmp requires), and strcmp's
    // int result is mapped onto std::strong_ordering. pNext compares by address.
    std::strong_ordering operator<=>( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = stages <=> rhs.stages; cmp != 0 )
        return cmp;
      if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = subgroupSize <=> rhs.subgroupSize; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    // Equality mirrors the ordering above: strcmp for the char arrays, address
    // comparison for pNext.
    bool operator==( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stages == rhs.stages ) && ( strcmp( name, rhs.name ) == 0 ) &&
             ( strcmp( description, rhs.description ) == 0 ) && ( subgroupSize == rhs.subgroupSize );
    }

    bool operator!=( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    StructureType                                 sType        = StructureType::ePipelineExecutablePropertiesKHR;
    void *                                        pNext        = {};
    ShaderStageFlags                              stages       = {};
    ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name         = {};
    ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description  = {};
    uint32_t                                      subgroupSize = {};
  };

  // Maps the C struct type to its C++ wrapper (C++20 and newer only, per the guard below).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPipelineExecutablePropertiesKHR>
  {
    using Type = PipelineExecutablePropertiesKHR;
  };
#endif

  // Maps the sType enumerant back to the wrapper type that carries it.
  template <>
  struct CppType<StructureType, StructureType::ePipelineExecutablePropertiesKHR>
  {
    using Type = PipelineExecutablePropertiesKHR;
  };

  // Union wrapper for VkPipelineExecutableStatisticValueKHR: exactly one alternative
  // (b32 / i64 / u64 / f64) is active at a time; the companion
  // PipelineExecutableStatisticKHR::format field indicates which one.
  union PipelineExecutableStatisticValueKHR
  {
    using NativeType = VkPipelineExecutableStatisticValueKHR;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )

    // One converting constructor per alternative; only the Bool32 overload is
    // defaulted, so it also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( Bool32 b32_ = {} ) : b32( b32_ ) {}

    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( int64_t i64_ ) : i64( i64_ ) {}

    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( uint64_t u64_ ) : u64( u64_ ) {}

    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( double f64_ ) : f64( f64_ ) {}
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
    // Chainable setters: each activates the corresponding alternative and returns *this.
    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setB32( Bool32 b32_ ) VULKAN_HPP_NOEXCEPT
    {
      b32 = b32_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setI64( int64_t i64_ ) VULKAN_HPP_NOEXCEPT
    {
      i64 = i64_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setU64( uint64_t u64_ ) VULKAN_HPP_NOEXCEPT
    {
      u64 = u64_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setF64( double f64_ ) VULKAN_HPP_NOEXCEPT
    {
      f64 = f64_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the C union via layout-compatible reinterpret_cast.
    operator VkPipelineExecutableStatisticValueKHR const &() const
    {
      return *reinterpret_cast<const VkPipelineExecutableStatisticValueKHR *>( this );
    }

    operator VkPipelineExecutableStatisticValueKHR &()
    {
      return *reinterpret_cast<VkPipelineExecutableStatisticValueKHR *>( this );
    }

    // With unrestricted unions the b32 member uses the C++ Bool32 wrapper type;
    // otherwise it falls back to the raw C VkBool32.
#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
    Bool32   b32;
    int64_t  i64;
    uint64_t u64;
    double   f64;
#else
    VkBool32 b32;
    int64_t  i64;
    uint64_t u64;
    double   f64;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPipelineExecutableStatisticValueKHR>
  {
    using Type = PipelineExecutableStatisticValueKHR;
  };
#endif

  // wrapper struct for struct VkPipelineExecutableStatisticKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineExecutableStatisticKHR.html
  //
  // NOTE: member order/types mirror the C struct exactly; the reinterpret_cast
  // conversions below depend on that layout compatibility. No comparison operators
  // are generated here — NOTE(review): presumably because the union-typed `value`
  // member cannot be compared member-wise; verify against the generator.
  struct PipelineExecutableStatisticKHR
  {
    using NativeType = VkPipelineExecutableStatisticKHR;

    // May appear at most once in a pNext chain; structureType is the fixed sType value.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineExecutableStatisticKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; `format` indicates which alternative of the `value`
    // union is active.
    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticKHR( std::array<char, VK_MAX_DESCRIPTION_SIZE> const & name_        = {},
                                                            std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
                                                            PipelineExecutableStatisticFormatKHR format_ = PipelineExecutableStatisticFormatKHR::eBool32,
                                                            PipelineExecutableStatisticValueKHR  value_  = {},
                                                            void *                               pNext_  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , name{ name_ }
      , description{ description_ }
      , format{ format_ }
      , value{ value_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticKHR( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct via a layout-compatible reinterpret_cast.
    PipelineExecutableStatisticKHR( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineExecutableStatisticKHR( *reinterpret_cast<PipelineExecutableStatisticKHR const *>( &rhs ) )
    {
    }

    PipelineExecutableStatisticKHR & operator=( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; valid because the two types are layout-compatible.
    PipelineExecutableStatisticKHR & operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineExecutableStatisticKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C struct (reference and pointer, const and non-const).
    operator VkPipelineExecutableStatisticKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineExecutableStatisticKHR *>( this );
    }

    operator VkPipelineExecutableStatisticKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineExecutableStatisticKHR *>( this );
    }

    operator VkPipelineExecutableStatisticKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineExecutableStatisticKHR *>( this );
    }

    operator VkPipelineExecutableStatisticKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineExecutableStatisticKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references over all members, for reflection-aware consumers.
    std::tuple<StructureType const &,
               void * const &,
               ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &,
               ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &,
               PipelineExecutableStatisticFormatKHR const &,
               PipelineExecutableStatisticValueKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, name, description, format, value );
    }
#endif

  public:
    StructureType                                 sType       = StructureType::ePipelineExecutableStatisticKHR;
    void *                                        pNext       = {};
    ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name        = {};
    ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
    PipelineExecutableStatisticFormatKHR          format      = PipelineExecutableStatisticFormatKHR::eBool32;
    PipelineExecutableStatisticValueKHR           value       = {};
  };

  // Maps the C struct type to its C++ wrapper (C++20 and newer only, per the guard below).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPipelineExecutableStatisticKHR>
  {
    using Type = PipelineExecutableStatisticKHR;
  };
#endif

  // Maps the sType enumerant back to the wrapper type that carries it.
  template <>
  struct CppType<StructureType, StructureType::ePipelineExecutableStatisticKHR>
  {
    using Type = PipelineExecutableStatisticKHR;
  };

  // wrapper struct for struct VkPipelineFragmentDensityMapLayeredCreateInfoVALVE, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineFragmentDensityMapLayeredCreateInfoVALVE.html
  //
  // NOTE: member order/types mirror the C struct exactly; the reinterpret_cast
  // conversions below depend on that layout compatibility.
  struct PipelineFragmentDensityMapLayeredCreateInfoVALVE
  {
    using NativeType = VkPipelineFragmentDensityMapLayeredCreateInfoVALVE;

    // May appear at most once in a pNext chain; structureType is the fixed sType value.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineFragmentDensityMapLayeredCreateInfoVALVE;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its default member initializer.
    VULKAN_HPP_CONSTEXPR PipelineFragmentDensityMapLayeredCreateInfoVALVE( uint32_t     maxFragmentDensityMapLayers_ = {},
                                                                           const void * pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxFragmentDensityMapLayers{ maxFragmentDensityMapLayers_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PipelineFragmentDensityMapLayeredCreateInfoVALVE( PipelineFragmentDensityMapLayeredCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct via a layout-compatible reinterpret_cast.
    PipelineFragmentDensityMapLayeredCreateInfoVALVE( VkPipelineFragmentDensityMapLayeredCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineFragmentDensityMapLayeredCreateInfoVALVE( *reinterpret_cast<PipelineFragmentDensityMapLayeredCreateInfoVALVE const *>( &rhs ) )
    {
    }

    PipelineFragmentDensityMapLayeredCreateInfoVALVE & operator=( PipelineFragmentDensityMapLayeredCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; valid because the two types are layout-compatible.
    PipelineFragmentDensityMapLayeredCreateInfoVALVE & operator=( VkPipelineFragmentDensityMapLayeredCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineFragmentDensityMapLayeredCreateInfoVALVE const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (builder style): each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentDensityMapLayeredCreateInfoVALVE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentDensityMapLayeredCreateInfoVALVE &
      setMaxFragmentDensityMapLayers( uint32_t maxFragmentDensityMapLayers_ ) VULKAN_HPP_NOEXCEPT
    {
      maxFragmentDensityMapLayers = maxFragmentDensityMapLayers_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (reference and pointer, const and non-const).
    operator VkPipelineFragmentDensityMapLayeredCreateInfoVALVE const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineFragmentDensityMapLayeredCreateInfoVALVE *>( this );
    }

    operator VkPipelineFragmentDensityMapLayeredCreateInfoVALVE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineFragmentDensityMapLayeredCreateInfoVALVE *>( this );
    }

    operator VkPipelineFragmentDensityMapLayeredCreateInfoVALVE const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineFragmentDensityMapLayeredCreateInfoVALVE *>( this );
    }

    operator VkPipelineFragmentDensityMapLayeredCreateInfoVALVE *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineFragmentDensityMapLayeredCreateInfoVALVE *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references over all members, used by the reflection-based comparison path.
    std::tuple<StructureType const &, const void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxFragmentDensityMapLayers );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineFragmentDensityMapLayeredCreateInfoVALVE const & ) const = default;
#else
    // Member-wise equality; pNext is compared by address, not by pointee contents.
    bool operator==( PipelineFragmentDensityMapLayeredCreateInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxFragmentDensityMapLayers == rhs.maxFragmentDensityMapLayers );
#  endif
    }

    bool operator!=( PipelineFragmentDensityMapLayeredCreateInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                       = StructureType::ePipelineFragmentDensityMapLayeredCreateInfoVALVE;
    const void *  pNext                       = {};
    uint32_t      maxFragmentDensityMapLayers = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and newer: maps the C struct type to its C++ wrapper for template-based lookup.
  template <>
  struct CppType<VkPipelineFragmentDensityMapLayeredCreateInfoVALVE>
  {
    using Type = PipelineFragmentDensityMapLayeredCreateInfoVALVE;
  };
#endif

  // Maps the sType enumerant back to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePipelineFragmentDensityMapLayeredCreateInfoVALVE>
  {
    using Type = PipelineFragmentDensityMapLayeredCreateInfoVALVE;
  };

  // wrapper struct for struct VkPipelineFragmentShadingRateEnumStateCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineFragmentShadingRateEnumStateCreateInfoNV.html
  struct PipelineFragmentShadingRateEnumStateCreateInfoNV
  {
    // The corresponding C struct; the reinterpret_casts below rely on identical memory layout.
    using NativeType = VkPipelineFragmentShadingRateEnumStateCreateInfoNV;

    static const bool                                  allowDuplicate = false;
    // sType value the Vulkan API expects for this structure.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext is last so every field has a defaulted argument.
    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV(
      FragmentShadingRateTypeNV                               shadingRateType_ = FragmentShadingRateTypeNV::eFragmentSize,
      FragmentShadingRateNV                                   shadingRate_     = FragmentShadingRateNV::e1InvocationPerPixel,
      std::array<FragmentShadingRateCombinerOpKHR, 2> const & combinerOps_     = { { FragmentShadingRateCombinerOpKHR::eKeep,
                                                                                     FragmentShadingRateCombinerOpKHR::eKeep } },
      const void *                                            pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shadingRateType{ shadingRateType_ }
      , shadingRate{ shadingRate_ }
      , combinerOps{ combinerOps_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14
      PipelineFragmentShadingRateEnumStateCreateInfoNV( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct (layout-compatible cast).
    PipelineFragmentShadingRateEnumStateCreateInfoNV( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineFragmentShadingRateEnumStateCreateInfoNV( *reinterpret_cast<PipelineFragmentShadingRateEnumStateCreateInfoNV const *>( &rhs ) )
    {
    }

    PipelineFragmentShadingRateEnumStateCreateInfoNV & operator=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (layout-compatible cast).
    PipelineFragmentShadingRateEnumStateCreateInfoNV & operator=( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineFragmentShadingRateEnumStateCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chaining.
    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV &
      setShadingRateType( FragmentShadingRateTypeNV shadingRateType_ ) VULKAN_HPP_NOEXCEPT
    {
      shadingRateType = shadingRateType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & setShadingRate( FragmentShadingRateNV shadingRate_ ) VULKAN_HPP_NOEXCEPT
    {
      shadingRate = shadingRate_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV &
      setCombinerOps( std::array<FragmentShadingRateCombinerOpKHR, 2> combinerOps_ ) VULKAN_HPP_NOEXCEPT
    {
      combinerOps = combinerOps_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions (by reference and by pointer) so this wrapper can be passed directly to the C API.
    operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineFragmentShadingRateEnumStateCreateInfoNV *>( this );
    }

    operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineFragmentShadingRateEnumStateCreateInfoNV *>( this );
    }

    operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineFragmentShadingRateEnumStateCreateInfoNV *>( this );
    }

    operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineFragmentShadingRateEnumStateCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of references, for reflection-based comparison.
    std::tuple<StructureType const &,
               const void * const &,
               FragmentShadingRateTypeNV const &,
               FragmentShadingRateNV const &,
               ArrayWrapper1D<FragmentShadingRateCombinerOpKHR, 2> const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shadingRateType, shadingRate, combinerOps );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineFragmentShadingRateEnumStateCreateInfoNV const & ) const = default;
#else
    // Member-wise equality; note pNext is compared as a raw pointer, not deep-compared.
    bool operator==( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateType == rhs.shadingRateType ) && ( shadingRate == rhs.shadingRate ) &&
             ( combinerOps == rhs.combinerOps );
#  endif
    }

    bool operator!=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order must match the C struct for the casts above to be valid.
    StructureType                                       sType           = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV;
    const void *                                        pNext           = {};
    FragmentShadingRateTypeNV                           shadingRateType = FragmentShadingRateTypeNV::eFragmentSize;
    FragmentShadingRateNV                               shadingRate     = FragmentShadingRateNV::e1InvocationPerPixel;
    ArrayWrapper1D<FragmentShadingRateCombinerOpKHR, 2> combinerOps     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and newer: maps the C struct type to its C++ wrapper for template-based lookup.
  template <>
  struct CppType<VkPipelineFragmentShadingRateEnumStateCreateInfoNV>
  {
    using Type = PipelineFragmentShadingRateEnumStateCreateInfoNV;
  };
#endif

  // Maps the sType enumerant back to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV>
  {
    using Type = PipelineFragmentShadingRateEnumStateCreateInfoNV;
  };

  // wrapper struct for struct VkPipelineFragmentShadingRateStateCreateInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineFragmentShadingRateStateCreateInfoKHR.html
  struct PipelineFragmentShadingRateStateCreateInfoKHR
  {
    // The corresponding C struct; the reinterpret_casts below rely on identical memory layout.
    using NativeType = VkPipelineFragmentShadingRateStateCreateInfoKHR;

    static const bool                                  allowDuplicate = false;
    // sType value the Vulkan API expects for this structure.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext is last so every field has a defaulted argument.
    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR(
      Extent2D                                                fragmentSize_ = {},
      std::array<FragmentShadingRateCombinerOpKHR, 2> const & combinerOps_  = { { FragmentShadingRateCombinerOpKHR::eKeep,
                                                                                  FragmentShadingRateCombinerOpKHR::eKeep } },
      const void *                                            pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , fragmentSize{ fragmentSize_ }
      , combinerOps{ combinerOps_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14
      PipelineFragmentShadingRateStateCreateInfoKHR( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct (layout-compatible cast).
    PipelineFragmentShadingRateStateCreateInfoKHR( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineFragmentShadingRateStateCreateInfoKHR( *reinterpret_cast<PipelineFragmentShadingRateStateCreateInfoKHR const *>( &rhs ) )
    {
    }

    PipelineFragmentShadingRateStateCreateInfoKHR & operator=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (layout-compatible cast).
    PipelineFragmentShadingRateStateCreateInfoKHR & operator=( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineFragmentShadingRateStateCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chaining.
    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & setFragmentSize( Extent2D const & fragmentSize_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentSize = fragmentSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR &
      setCombinerOps( std::array<FragmentShadingRateCombinerOpKHR, 2> combinerOps_ ) VULKAN_HPP_NOEXCEPT
    {
      combinerOps = combinerOps_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions (by reference and by pointer) so this wrapper can be passed directly to the C API.
    operator VkPipelineFragmentShadingRateStateCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineFragmentShadingRateStateCreateInfoKHR *>( this );
    }

    operator VkPipelineFragmentShadingRateStateCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineFragmentShadingRateStateCreateInfoKHR *>( this );
    }

    operator VkPipelineFragmentShadingRateStateCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineFragmentShadingRateStateCreateInfoKHR *>( this );
    }

    operator VkPipelineFragmentShadingRateStateCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineFragmentShadingRateStateCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of references, for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, Extent2D const &, ArrayWrapper1D<FragmentShadingRateCombinerOpKHR, 2> const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, fragmentSize, combinerOps );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineFragmentShadingRateStateCreateInfoKHR const & ) const = default;
#else
    // Member-wise equality; note pNext is compared as a raw pointer, not deep-compared.
    bool operator==( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentSize == rhs.fragmentSize ) && ( combinerOps == rhs.combinerOps );
#  endif
    }

    bool operator!=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order must match the C struct for the casts above to be valid.
    StructureType                                       sType        = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR;
    const void *                                        pNext        = {};
    Extent2D                                            fragmentSize = {};
    ArrayWrapper1D<FragmentShadingRateCombinerOpKHR, 2> combinerOps  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and newer: maps the C struct type to its C++ wrapper for template-based lookup.
  template <>
  struct CppType<VkPipelineFragmentShadingRateStateCreateInfoKHR>
  {
    using Type = PipelineFragmentShadingRateStateCreateInfoKHR;
  };
#endif

  // Maps the sType enumerant back to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR>
  {
    using Type = PipelineFragmentShadingRateStateCreateInfoKHR;
  };

  // wrapper struct for struct VkPipelineIndirectDeviceAddressInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineIndirectDeviceAddressInfoNV.html
  struct PipelineIndirectDeviceAddressInfoNV
  {
    // The corresponding C struct; the reinterpret_casts below rely on identical memory layout.
    using NativeType = VkPipelineIndirectDeviceAddressInfoNV;

    static const bool                                  allowDuplicate = false;
    // sType value the Vulkan API expects for this structure.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineIndirectDeviceAddressInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext is last so every field has a defaulted argument.
    VULKAN_HPP_CONSTEXPR PipelineIndirectDeviceAddressInfoNV( PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics,
                                                              Pipeline          pipeline_          = {},
                                                              const void *      pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pipelineBindPoint{ pipelineBindPoint_ }
      , pipeline{ pipeline_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineIndirectDeviceAddressInfoNV( PipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct (layout-compatible cast).
    PipelineIndirectDeviceAddressInfoNV( VkPipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineIndirectDeviceAddressInfoNV( *reinterpret_cast<PipelineIndirectDeviceAddressInfoNV const *>( &rhs ) )
    {
    }

    PipelineIndirectDeviceAddressInfoNV & operator=( PipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (layout-compatible cast).
    PipelineIndirectDeviceAddressInfoNV & operator=( VkPipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineIndirectDeviceAddressInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chaining.
    VULKAN_HPP_CONSTEXPR_14 PipelineIndirectDeviceAddressInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineIndirectDeviceAddressInfoNV & setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineBindPoint = pipelineBindPoint_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineIndirectDeviceAddressInfoNV & setPipeline( Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
    {
      pipeline = pipeline_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions (by reference and by pointer) so this wrapper can be passed directly to the C API.
    operator VkPipelineIndirectDeviceAddressInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( this );
    }

    operator VkPipelineIndirectDeviceAddressInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineIndirectDeviceAddressInfoNV *>( this );
    }

    operator VkPipelineIndirectDeviceAddressInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( this );
    }

    operator VkPipelineIndirectDeviceAddressInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineIndirectDeviceAddressInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of references, for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, PipelineBindPoint const &, Pipeline const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pipelineBindPoint, pipeline );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineIndirectDeviceAddressInfoNV const & ) const = default;
#else
    // Member-wise equality; note pNext is compared as a raw pointer, not deep-compared.
    bool operator==( PipelineIndirectDeviceAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipeline == rhs.pipeline );
#  endif
    }

    bool operator!=( PipelineIndirectDeviceAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order must match the C struct for the casts above to be valid.
    StructureType     sType             = StructureType::ePipelineIndirectDeviceAddressInfoNV;
    const void *      pNext             = {};
    PipelineBindPoint pipelineBindPoint = PipelineBindPoint::eGraphics;
    Pipeline          pipeline          = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and newer: maps the C struct type to its C++ wrapper for template-based lookup.
  template <>
  struct CppType<VkPipelineIndirectDeviceAddressInfoNV>
  {
    using Type = PipelineIndirectDeviceAddressInfoNV;
  };
#endif

  // Maps the sType enumerant back to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePipelineIndirectDeviceAddressInfoNV>
  {
    using Type = PipelineIndirectDeviceAddressInfoNV;
  };

  // wrapper struct for struct VkPipelineInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineInfoKHR.html
  struct PipelineInfoKHR
  {
    // The corresponding C struct; the reinterpret_casts below rely on identical memory layout.
    using NativeType = VkPipelineInfoKHR;

    static const bool                                  allowDuplicate = false;
    // sType value the Vulkan API expects for this structure.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext is last so every field has a defaulted argument.
    VULKAN_HPP_CONSTEXPR PipelineInfoKHR( Pipeline pipeline_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pipeline{ pipeline_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineInfoKHR( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct (layout-compatible cast).
    PipelineInfoKHR( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PipelineInfoKHR( *reinterpret_cast<PipelineInfoKHR const *>( &rhs ) ) {}

    PipelineInfoKHR & operator=( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (layout-compatible cast).
    PipelineInfoKHR & operator=( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chaining.
    VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR & setPipeline( Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
    {
      pipeline = pipeline_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions (by reference and by pointer) so this wrapper can be passed directly to the C API.
    operator VkPipelineInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineInfoKHR *>( this );
    }

    operator VkPipelineInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineInfoKHR *>( this );
    }

    operator VkPipelineInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineInfoKHR *>( this );
    }

    operator VkPipelineInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of references, for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, Pipeline const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pipeline );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineInfoKHR const & ) const = default;
#else
    // Member-wise equality; note pNext is compared as a raw pointer, not deep-compared.
    bool operator==( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline );
#  endif
    }

    bool operator!=( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order must match the C struct for the casts above to be valid.
    StructureType sType    = StructureType::ePipelineInfoKHR;
    const void *  pNext    = {};
    Pipeline      pipeline = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and newer: maps the C struct type to its C++ wrapper for template-based lookup.
  template <>
  struct CppType<VkPipelineInfoKHR>
  {
    using Type = PipelineInfoKHR;
  };
#endif

  // Maps the sType enumerant back to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePipelineInfoKHR>
  {
    using Type = PipelineInfoKHR;
  };

  // Alias providing the EXT-suffixed name for the same structure (source compatibility).
  using PipelineInfoEXT = PipelineInfoKHR;

  // wrapper struct for struct VkPipelineLayoutCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineLayoutCreateInfo.html
  struct PipelineLayoutCreateInfo
  {
    // The corresponding C struct; the reinterpret_casts below rely on identical memory layout.
    using NativeType = VkPipelineLayoutCreateInfo;

    static const bool                                  allowDuplicate = false;
    // sType value the Vulkan API expects for this structure.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineLayoutCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext is last so every field has a defaulted argument.
    VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( PipelineLayoutCreateFlags   flags_                  = {},
                                                   uint32_t                    setLayoutCount_         = {},
                                                   const DescriptorSetLayout * pSetLayouts_            = {},
                                                   uint32_t                    pushConstantRangeCount_ = {},
                                                   const PushConstantRange *   pPushConstantRanges_    = {},
                                                   const void *                pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , setLayoutCount{ setLayoutCount_ }
      , pSetLayouts{ pSetLayouts_ }
      , pushConstantRangeCount{ pushConstantRangeCount_ }
      , pPushConstantRanges{ pPushConstantRanges_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct (layout-compatible cast).
    PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineLayoutCreateInfo( *reinterpret_cast<PipelineLayoutCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives the count members from the ArrayProxy sizes.
    // Only pointers are stored — the proxies' underlying arrays must outlive this struct.
    PipelineLayoutCreateInfo( PipelineLayoutCreateFlags                                  flags_,
                              ArrayProxyNoTemporaries<const DescriptorSetLayout> const & setLayouts_,
                              ArrayProxyNoTemporaries<const PushConstantRange> const &   pushConstantRanges_ = {},
                              const void *                                               pNext_              = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , setLayoutCount( static_cast<uint32_t>( setLayouts_.size() ) )
      , pSetLayouts( setLayouts_.data() )
      , pushConstantRangeCount( static_cast<uint32_t>( pushConstantRanges_.size() ) )
      , pPushConstantRanges( pushConstantRanges_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineLayoutCreateInfo & operator=( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (layout-compatible cast).
    PipelineLayoutCreateInfo & operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineLayoutCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this to allow chaining.
    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setFlags( PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT
    {
      setLayoutCount = setLayoutCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPSetLayouts( const DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      pSetLayouts = pSetLayouts_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both setLayoutCount and pSetLayouts from an ArrayProxy (pointer only — the caller keeps the array alive).
    PipelineLayoutCreateInfo & setSetLayouts( ArrayProxyNoTemporaries<const DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      setLayoutCount = static_cast<uint32_t>( setLayouts_.size() );
      pSetLayouts    = setLayouts_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT
    {
      pushConstantRangeCount = pushConstantRangeCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPPushConstantRanges( const PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
    {
      pPushConstantRanges = pPushConstantRanges_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both pushConstantRangeCount and pPushConstantRanges from an ArrayProxy (pointer only — the caller keeps the array alive).
    PipelineLayoutCreateInfo & setPushConstantRanges( ArrayProxyNoTemporaries<const PushConstantRange> const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
    {
      pushConstantRangeCount = static_cast<uint32_t>( pushConstantRanges_.size() );
      pPushConstantRanges    = pushConstantRanges_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions (by reference and by pointer) so this wrapper can be passed directly to the C API.
    operator VkPipelineLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineLayoutCreateInfo *>( this );
    }

    operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineLayoutCreateInfo *>( this );
    }

    operator VkPipelineLayoutCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineLayoutCreateInfo *>( this );
    }

    operator VkPipelineLayoutCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineLayoutCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of references, for reflection-based comparison.
    std::tuple<StructureType const &,
               const void * const &,
               PipelineLayoutCreateFlags const &,
               uint32_t const &,
               const DescriptorSetLayout * const &,
               uint32_t const &,
               const PushConstantRange * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, setLayoutCount, pSetLayouts, pushConstantRangeCount, pPushConstantRanges );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineLayoutCreateInfo const & ) const = default;
#else
    // Member-wise equality; note pNext, pSetLayouts and pPushConstantRanges are compared as raw pointers, not deep-compared.
    bool operator==( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( setLayoutCount == rhs.setLayoutCount ) &&
             ( pSetLayouts == rhs.pSetLayouts ) && ( pushConstantRangeCount == rhs.pushConstantRangeCount ) &&
             ( pPushConstantRanges == rhs.pPushConstantRanges );
#  endif
    }

    bool operator!=( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order must match the C struct for the casts above to be valid.
    StructureType               sType                  = StructureType::ePipelineLayoutCreateInfo;
    const void *                pNext                  = {};
    PipelineLayoutCreateFlags   flags                  = {};
    uint32_t                    setLayoutCount         = {};
    const DescriptorSetLayout * pSetLayouts            = {};
    uint32_t                    pushConstantRangeCount = {};
    const PushConstantRange *   pPushConstantRanges    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and newer: maps the C struct type to its C++ wrapper for template-based lookup.
  template <>
  struct CppType<VkPipelineLayoutCreateInfo>
  {
    using Type = PipelineLayoutCreateInfo;
  };
#endif

  // Maps the sType enumerant back to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePipelineLayoutCreateInfo>
  {
    using Type = PipelineLayoutCreateInfo;
  };

  // wrapper struct for struct VkPipelinePropertiesIdentifierEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelinePropertiesIdentifierEXT.html
  struct PipelinePropertiesIdentifierEXT
  {
    using NativeType = VkPipelinePropertiesIdentifierEXT;

    // sType value this struct reports, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelinePropertiesIdentifierEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed by its member initializer, so only pNext and the payload are taken.
    VULKAN_HPP_CONSTEXPR_14 PipelinePropertiesIdentifierEXT( std::array<uint8_t, VK_UUID_SIZE> const & pipelineIdentifier_ = {},
                                                             void *                                    pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pipelineIdentifier{ pipelineIdentifier_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PipelinePropertiesIdentifierEXT( PipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; relies on wrapper and C struct having identical memory layout.
    PipelinePropertiesIdentifierEXT( VkPipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelinePropertiesIdentifierEXT( *reinterpret_cast<PipelinePropertiesIdentifierEXT const *>( &rhs ) )
    {
    }

    PipelinePropertiesIdentifierEXT & operator=( PipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    PipelinePropertiesIdentifierEXT & operator=( VkPipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelinePropertiesIdentifierEXT const *>( &rhs );
      return *this;
    }

    // implicit conversions (reference and pointer, const and non-const) to the native C type,
    // so the wrapper can be handed straight to the C API.
    // NOTE(review): no setters are generated for this struct — presumably it is filled in by the
    // implementation (returned-only); confirm against the registry.
    operator VkPipelinePropertiesIdentifierEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelinePropertiesIdentifierEXT *>( this );
    }

    operator VkPipelinePropertiesIdentifierEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelinePropertiesIdentifierEXT *>( this );
    }

    operator VkPipelinePropertiesIdentifierEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelinePropertiesIdentifierEXT *>( this );
    }

    operator VkPipelinePropertiesIdentifierEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelinePropertiesIdentifierEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members; used by the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pipelineIdentifier );
    }
#endif

    // member-wise comparison; pNext is compared as a raw pointer, not deep-compared.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelinePropertiesIdentifierEXT const & ) const = default;
#else
    bool operator==( PipelinePropertiesIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineIdentifier == rhs.pipelineIdentifier );
#  endif
    }

    bool operator!=( PipelinePropertiesIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order must mirror VkPipelinePropertiesIdentifierEXT exactly; the reinterpret_casts above depend on it.
    StructureType                         sType              = StructureType::ePipelinePropertiesIdentifierEXT;
    void *                                pNext              = {};
    ArrayWrapper1D<uint8_t, VK_UUID_SIZE> pipelineIdentifier = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20-only trait: maps the native C struct type to its C++ wrapper type.
  template <>
  struct CppType<VkPipelinePropertiesIdentifierEXT>
  {
    using Type = PipelinePropertiesIdentifierEXT;
  };
#endif

  // Trait mapping the StructureType enumerant to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePipelinePropertiesIdentifierEXT>
  {
    using Type = PipelinePropertiesIdentifierEXT;
  };

  // wrapper struct for struct VkPipelineRasterizationConservativeStateCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationConservativeStateCreateInfoEXT.html
  struct PipelineRasterizationConservativeStateCreateInfoEXT
  {
    using NativeType = VkPipelineRasterizationConservativeStateCreateInfoEXT;

    // sType value this struct reports, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed by its member initializer, so only the payload and pNext are taken.
    VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT(
      PipelineRasterizationConservativeStateCreateFlagsEXT flags_                            = {},
      ConservativeRasterizationModeEXT                     conservativeRasterizationMode_    = ConservativeRasterizationModeEXT::eDisabled,
      float                                                extraPrimitiveOverestimationSize_ = {},
      const void *                                         pNext_                            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , conservativeRasterizationMode{ conservativeRasterizationMode_ }
      , extraPrimitiveOverestimationSize{ extraPrimitiveOverestimationSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PipelineRasterizationConservativeStateCreateInfoEXT( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; relies on wrapper and C struct having identical memory layout.
    PipelineRasterizationConservativeStateCreateInfoEXT( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineRasterizationConservativeStateCreateInfoEXT( *reinterpret_cast<PipelineRasterizationConservativeStateCreateInfoEXT const *>( &rhs ) )
    {
    }

    PipelineRasterizationConservativeStateCreateInfoEXT &
      operator=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    PipelineRasterizationConservativeStateCreateInfoEXT & operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineRasterizationConservativeStateCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT &
      setFlags( PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT &
      setConservativeRasterizationMode( ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
    {
      conservativeRasterizationMode = conservativeRasterizationMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT &
      setExtraPrimitiveOverestimationSize( float extraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT
    {
      extraPrimitiveOverestimationSize = extraPrimitiveOverestimationSize_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions (reference and pointer, const and non-const) to the native C type,
    // so the wrapper can be handed straight to the C API.
    operator VkPipelineRasterizationConservativeStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRasterizationConservativeStateCreateInfoEXT *>( this );
    }

    operator VkPipelineRasterizationConservativeStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRasterizationConservativeStateCreateInfoEXT *>( this );
    }

    operator VkPipelineRasterizationConservativeStateCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineRasterizationConservativeStateCreateInfoEXT *>( this );
    }

    operator VkPipelineRasterizationConservativeStateCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineRasterizationConservativeStateCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members; used by the reflection-based operator== below.
    std::tuple<StructureType const &,
               const void * const &,
               PipelineRasterizationConservativeStateCreateFlagsEXT const &,
               ConservativeRasterizationModeEXT const &,
               float const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, conservativeRasterizationMode, extraPrimitiveOverestimationSize );
    }
#endif

    // member-wise comparison; note extraPrimitiveOverestimationSize is a float compared with ==.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineRasterizationConservativeStateCreateInfoEXT const & ) const = default;
#else
    bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( conservativeRasterizationMode == rhs.conservativeRasterizationMode ) &&
             ( extraPrimitiveOverestimationSize == rhs.extraPrimitiveOverestimationSize );
#  endif
    }

    bool operator!=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order must mirror the C struct exactly; the reinterpret_casts above depend on it.
    StructureType                                        sType                            = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
    const void *                                         pNext                            = {};
    PipelineRasterizationConservativeStateCreateFlagsEXT flags                            = {};
    ConservativeRasterizationModeEXT                     conservativeRasterizationMode    = ConservativeRasterizationModeEXT::eDisabled;
    float                                                extraPrimitiveOverestimationSize = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20-only trait: maps the native C struct type to its C++ wrapper type.
  template <>
  struct CppType<VkPipelineRasterizationConservativeStateCreateInfoEXT>
  {
    using Type = PipelineRasterizationConservativeStateCreateInfoEXT;
  };
#endif

  // Trait mapping the StructureType enumerant to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT>
  {
    using Type = PipelineRasterizationConservativeStateCreateInfoEXT;
  };

  // wrapper struct for struct VkPipelineRasterizationDepthClipStateCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationDepthClipStateCreateInfoEXT.html
  struct PipelineRasterizationDepthClipStateCreateInfoEXT
  {
    using NativeType = VkPipelineRasterizationDepthClipStateCreateInfoEXT;

    // sType value this struct reports, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed by its member initializer, so only the payload and pNext are taken.
    VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( PipelineRasterizationDepthClipStateCreateFlagsEXT flags_           = {},
                                                                           Bool32                                            depthClipEnable_ = {},
                                                                           const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , depthClipEnable{ depthClipEnable_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PipelineRasterizationDepthClipStateCreateInfoEXT( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; relies on wrapper and C struct having identical memory layout.
    PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineRasterizationDepthClipStateCreateInfoEXT( *reinterpret_cast<PipelineRasterizationDepthClipStateCreateInfoEXT const *>( &rhs ) )
    {
    }

    PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineRasterizationDepthClipStateCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT &
      setFlags( PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & setDepthClipEnable( Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthClipEnable = depthClipEnable_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions (reference and pointer, const and non-const) to the native C type,
    // so the wrapper can be handed straight to the C API.
    operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRasterizationDepthClipStateCreateInfoEXT *>( this );
    }

    operator VkPipelineRasterizationDepthClipStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRasterizationDepthClipStateCreateInfoEXT *>( this );
    }

    operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineRasterizationDepthClipStateCreateInfoEXT *>( this );
    }

    operator VkPipelineRasterizationDepthClipStateCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineRasterizationDepthClipStateCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members; used by the reflection-based operator== below.
    std::tuple<StructureType const &, const void * const &, PipelineRasterizationDepthClipStateCreateFlagsEXT const &, Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, depthClipEnable );
    }
#endif

    // member-wise comparison; pNext is compared as a raw pointer, not deep-compared.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineRasterizationDepthClipStateCreateInfoEXT const & ) const = default;
#else
    bool operator==( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( depthClipEnable == rhs.depthClipEnable );
#  endif
    }

    bool operator!=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order must mirror the C struct exactly; the reinterpret_casts above depend on it.
    StructureType                                     sType           = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;
    const void *                                      pNext           = {};
    PipelineRasterizationDepthClipStateCreateFlagsEXT flags           = {};
    Bool32                                            depthClipEnable = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20-only trait: maps the native C struct type to its C++ wrapper type.
  template <>
  struct CppType<VkPipelineRasterizationDepthClipStateCreateInfoEXT>
  {
    using Type = PipelineRasterizationDepthClipStateCreateInfoEXT;
  };
#endif

  // Trait mapping the StructureType enumerant to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT>
  {
    using Type = PipelineRasterizationDepthClipStateCreateInfoEXT;
  };

  // wrapper struct for struct VkPipelineRasterizationLineStateCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationLineStateCreateInfo.html
  struct PipelineRasterizationLineStateCreateInfo
  {
    using NativeType = VkPipelineRasterizationLineStateCreateInfo;

    // sType value this struct reports, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineRasterizationLineStateCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed by its member initializer, so only the payload and pNext are taken.
    VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfo( LineRasterizationMode lineRasterizationMode_ = LineRasterizationMode::eDefault,
                                                                   Bool32                stippledLineEnable_    = {},
                                                                   uint32_t              lineStippleFactor_     = {},
                                                                   uint16_t              lineStipplePattern_    = {},
                                                                   const void *          pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , lineRasterizationMode{ lineRasterizationMode_ }
      , stippledLineEnable{ stippledLineEnable_ }
      , lineStippleFactor{ lineStippleFactor_ }
      , lineStipplePattern{ lineStipplePattern_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfo( PipelineRasterizationLineStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; relies on wrapper and C struct having identical memory layout.
    PipelineRasterizationLineStateCreateInfo( VkPipelineRasterizationLineStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineRasterizationLineStateCreateInfo( *reinterpret_cast<PipelineRasterizationLineStateCreateInfo const *>( &rhs ) )
    {
    }

    PipelineRasterizationLineStateCreateInfo & operator=( PipelineRasterizationLineStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    PipelineRasterizationLineStateCreateInfo & operator=( VkPipelineRasterizationLineStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineRasterizationLineStateCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfo &
      setLineRasterizationMode( LineRasterizationMode lineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
    {
      lineRasterizationMode = lineRasterizationMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfo & setStippledLineEnable( Bool32 stippledLineEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      stippledLineEnable = stippledLineEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfo & setLineStippleFactor( uint32_t lineStippleFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      lineStippleFactor = lineStippleFactor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfo & setLineStipplePattern( uint16_t lineStipplePattern_ ) VULKAN_HPP_NOEXCEPT
    {
      lineStipplePattern = lineStipplePattern_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions (reference and pointer, const and non-const) to the native C type,
    // so the wrapper can be handed straight to the C API.
    operator VkPipelineRasterizationLineStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRasterizationLineStateCreateInfo *>( this );
    }

    operator VkPipelineRasterizationLineStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRasterizationLineStateCreateInfo *>( this );
    }

    operator VkPipelineRasterizationLineStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineRasterizationLineStateCreateInfo *>( this );
    }

    operator VkPipelineRasterizationLineStateCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineRasterizationLineStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members; used by the reflection-based operator== below.
    std::tuple<StructureType const &, const void * const &, LineRasterizationMode const &, Bool32 const &, uint32_t const &, uint16_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, lineRasterizationMode, stippledLineEnable, lineStippleFactor, lineStipplePattern );
    }
#endif

    // member-wise comparison; pNext is compared as a raw pointer, not deep-compared.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineRasterizationLineStateCreateInfo const & ) const = default;
#else
    bool operator==( PipelineRasterizationLineStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( lineRasterizationMode == rhs.lineRasterizationMode ) &&
             ( stippledLineEnable == rhs.stippledLineEnable ) && ( lineStippleFactor == rhs.lineStippleFactor ) &&
             ( lineStipplePattern == rhs.lineStipplePattern );
#  endif
    }

    bool operator!=( PipelineRasterizationLineStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order must mirror the C struct exactly; the reinterpret_casts above depend on it.
    StructureType         sType                 = StructureType::ePipelineRasterizationLineStateCreateInfo;
    const void *          pNext                 = {};
    LineRasterizationMode lineRasterizationMode = LineRasterizationMode::eDefault;
    Bool32                stippledLineEnable    = {};
    uint32_t              lineStippleFactor     = {};
    uint16_t              lineStipplePattern    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20-only trait: maps the native C struct type to its C++ wrapper type.
  template <>
  struct CppType<VkPipelineRasterizationLineStateCreateInfo>
  {
    using Type = PipelineRasterizationLineStateCreateInfo;
  };
#endif

  // Trait mapping the StructureType enumerant to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePipelineRasterizationLineStateCreateInfo>
  {
    using Type = PipelineRasterizationLineStateCreateInfo;
  };

  // Aliases for the extension-suffixed names, kept for source compatibility with code written
  // against the EXT/KHR variants of this struct.
  using PipelineRasterizationLineStateCreateInfoEXT = PipelineRasterizationLineStateCreateInfo;
  using PipelineRasterizationLineStateCreateInfoKHR = PipelineRasterizationLineStateCreateInfo;

  // wrapper struct for struct VkPipelineRasterizationProvokingVertexStateCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationProvokingVertexStateCreateInfoEXT.html
  struct PipelineRasterizationProvokingVertexStateCreateInfoEXT
  {
    using NativeType = VkPipelineRasterizationProvokingVertexStateCreateInfoEXT;

    // sType value this struct reports, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed by its member initializer, so only the payload and pNext are taken.
    VULKAN_HPP_CONSTEXPR
      PipelineRasterizationProvokingVertexStateCreateInfoEXT( ProvokingVertexModeEXT provokingVertexMode_ = ProvokingVertexModeEXT::eFirstVertex,
                                                              const void *           pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , provokingVertexMode{ provokingVertexMode_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; relies on wrapper and C struct having identical memory layout.
    PipelineRasterizationProvokingVertexStateCreateInfoEXT( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineRasterizationProvokingVertexStateCreateInfoEXT( *reinterpret_cast<PipelineRasterizationProvokingVertexStateCreateInfoEXT const *>( &rhs ) )
    {
    }

    PipelineRasterizationProvokingVertexStateCreateInfoEXT &
      operator=( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    PipelineRasterizationProvokingVertexStateCreateInfoEXT &
      operator=( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineRasterizationProvokingVertexStateCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT &
      setProvokingVertexMode( ProvokingVertexModeEXT provokingVertexMode_ ) VULKAN_HPP_NOEXCEPT
    {
      provokingVertexMode = provokingVertexMode_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions (reference and pointer, const and non-const) to the native C type,
    // so the wrapper can be handed straight to the C API.
    operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRasterizationProvokingVertexStateCreateInfoEXT *>( this );
    }

    operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRasterizationProvokingVertexStateCreateInfoEXT *>( this );
    }

    operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineRasterizationProvokingVertexStateCreateInfoEXT *>( this );
    }

    operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineRasterizationProvokingVertexStateCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members; used by the reflection-based operator== below.
    std::tuple<StructureType const &, const void * const &, ProvokingVertexModeEXT const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, provokingVertexMode );
    }
#endif

    // member-wise comparison; pNext is compared as a raw pointer, not deep-compared.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & ) const = default;
#else
    bool operator==( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexMode == rhs.provokingVertexMode );
#  endif
    }

    bool operator!=( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order must mirror the C struct exactly; the reinterpret_casts above depend on it.
    StructureType          sType               = StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT;
    const void *           pNext               = {};
    ProvokingVertexModeEXT provokingVertexMode = ProvokingVertexModeEXT::eFirstVertex;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20-only trait: maps the native C struct type to its C++ wrapper type.
  template <>
  struct CppType<VkPipelineRasterizationProvokingVertexStateCreateInfoEXT>
  {
    using Type = PipelineRasterizationProvokingVertexStateCreateInfoEXT;
  };
#endif

  // Trait mapping the StructureType enumerant to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT>
  {
    using Type = PipelineRasterizationProvokingVertexStateCreateInfoEXT;
  };

  // wrapper struct for struct VkPipelineRasterizationStateRasterizationOrderAMD, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationStateRasterizationOrderAMD.html
  struct PipelineRasterizationStateRasterizationOrderAMD
  {
    using NativeType = VkPipelineRasterizationStateRasterizationOrderAMD;

    // sType value this struct reports, and whether it may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed by its member initializer, so only the payload and pNext are taken.
    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( RasterizationOrderAMD rasterizationOrder_ = RasterizationOrderAMD::eStrict,
                                                                          const void *          pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , rasterizationOrder{ rasterizationOrder_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PipelineRasterizationStateRasterizationOrderAMD( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; relies on wrapper and C struct having identical memory layout.
    PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineRasterizationStateRasterizationOrderAMD( *reinterpret_cast<PipelineRasterizationStateRasterizationOrderAMD const *>( &rhs ) )
    {
    }

    PipelineRasterizationStateRasterizationOrderAMD & operator=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    PipelineRasterizationStateRasterizationOrderAMD & operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineRasterizationStateRasterizationOrderAMD const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD &
      setRasterizationOrder( RasterizationOrderAMD rasterizationOrder_ ) VULKAN_HPP_NOEXCEPT
    {
      rasterizationOrder = rasterizationOrder_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions (reference and pointer, const and non-const) to the native C type,
    // so the wrapper can be handed straight to the C API.
    operator VkPipelineRasterizationStateRasterizationOrderAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD *>( this );
    }

    operator VkPipelineRasterizationStateRasterizationOrderAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRasterizationStateRasterizationOrderAMD *>( this );
    }

    operator VkPipelineRasterizationStateRasterizationOrderAMD const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD *>( this );
    }

    operator VkPipelineRasterizationStateRasterizationOrderAMD *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineRasterizationStateRasterizationOrderAMD *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members; used by the reflection-based operator== below.
    std::tuple<StructureType const &, const void * const &, RasterizationOrderAMD const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, rasterizationOrder );
    }
#endif

    // member-wise comparison; pNext is compared as a raw pointer, not deep-compared.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineRasterizationStateRasterizationOrderAMD const & ) const = default;
#else
    bool operator==( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rasterizationOrder == rhs.rasterizationOrder );
#  endif
    }

    bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // member order must mirror the C struct exactly; the reinterpret_casts above depend on it.
    StructureType         sType              = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
    const void *          pNext              = {};
    RasterizationOrderAMD rasterizationOrder = RasterizationOrderAMD::eStrict;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20-only trait: maps the native C struct type to its C++ wrapper type.
  template <>
  struct CppType<VkPipelineRasterizationStateRasterizationOrderAMD>
  {
    using Type = PipelineRasterizationStateRasterizationOrderAMD;
  };
#endif

  // Trait mapping the StructureType enumerant to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePipelineRasterizationStateRasterizationOrderAMD>
  {
    using Type = PipelineRasterizationStateRasterizationOrderAMD;
  };

  // wrapper struct for struct VkPipelineRasterizationStateStreamCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationStateStreamCreateInfoEXT.html
  struct PipelineRasterizationStateStreamCreateInfoEXT
  {
    using NativeType = VkPipelineRasterizationStateStreamCreateInfoEXT;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( PipelineRasterizationStateStreamCreateFlagsEXT flags_               = {},
                                                                        uint32_t                                       rasterizationStream_ = {},
                                                                        const void *                                   pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , rasterizationStream{ rasterizationStream_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PipelineRasterizationStateStreamCreateInfoEXT( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on this wrapper being layout-identical to NativeType.
    PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineRasterizationStateStreamCreateInfoEXT( *reinterpret_cast<PipelineRasterizationStateStreamCreateInfoEXT const *>( &rhs ) )
    {
    }

    PipelineRasterizationStateStreamCreateInfoEXT & operator=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-compatible reinterpretation).
    PipelineRasterizationStateStreamCreateInfoEXT & operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineRasterizationStateStreamCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT &
      setFlags( PipelineRasterizationStateStreamCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & setRasterizationStream( uint32_t rasterizationStream_ ) VULKAN_HPP_NOEXCEPT
    {
      rasterizationStream = rasterizationStream_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type, by reference and by pointer (no copy is made).
    operator VkPipelineRasterizationStateStreamCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRasterizationStateStreamCreateInfoEXT *>( this );
    }

    operator VkPipelineRasterizationStateStreamCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRasterizationStateStreamCreateInfoEXT *>( this );
    }

    operator VkPipelineRasterizationStateStreamCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineRasterizationStateStreamCreateInfoEXT *>( this );
    }

    operator VkPipelineRasterizationStateStreamCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineRasterizationStateStreamCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references, for reflection-based comparison/hashing.
    std::tuple<StructureType const &, const void * const &, PipelineRasterizationStateStreamCreateFlagsEXT const &, uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, rasterizationStream );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineRasterizationStateStreamCreateInfoEXT const & ) const = default;
#else
    // Memberwise equality; note that pNext is compared as a raw pointer, not deep-compared.
    bool operator==( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( rasterizationStream == rhs.rasterizationStream );
#  endif
    }

    bool operator!=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPipelineRasterizationStateStreamCreateInfoEXT exactly.
    StructureType                                  sType               = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT;
    const void *                                   pNext               = {};
    PipelineRasterizationStateStreamCreateFlagsEXT flags               = {};
    uint32_t                                       rasterizationStream = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan struct type to its C++ wrapper (used by generic/structure-chain code).
  template <>
  struct CppType<VkPipelineRasterizationStateStreamCreateInfoEXT>
  {
    using Type = PipelineRasterizationStateStreamCreateInfoEXT;
  };
#endif

  // Maps the StructureType enumerant back to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePipelineRasterizationStateStreamCreateInfoEXT>
  {
    using Type = PipelineRasterizationStateStreamCreateInfoEXT;
  };

  // wrapper struct for struct VkPipelineRenderingCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRenderingCreateInfo.html
  struct PipelineRenderingCreateInfo
  {
    using NativeType = VkPipelineRenderingCreateInfo;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineRenderingCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineRenderingCreateInfo( uint32_t       viewMask_                = {},
                                                      uint32_t       colorAttachmentCount_    = {},
                                                      const Format * pColorAttachmentFormats_ = {},
                                                      Format         depthAttachmentFormat_   = Format::eUndefined,
                                                      Format         stencilAttachmentFormat_ = Format::eUndefined,
                                                      const void *   pNext_                   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , viewMask{ viewMask_ }
      , colorAttachmentCount{ colorAttachmentCount_ }
      , pColorAttachmentFormats{ pColorAttachmentFormats_ }
      , depthAttachmentFormat{ depthAttachmentFormat_ }
      , stencilAttachmentFormat{ stencilAttachmentFormat_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineRenderingCreateInfo( PipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on this wrapper being layout-identical to NativeType.
    PipelineRenderingCreateInfo( VkPipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineRenderingCreateInfo( *reinterpret_cast<PipelineRenderingCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives colorAttachmentCount and pColorAttachmentFormats from the
    // array proxy. The proxy is non-owning — the caller must keep the format array alive while this
    // struct is in use.
    PipelineRenderingCreateInfo( uint32_t                                      viewMask_,
                                 ArrayProxyNoTemporaries<const Format> const & colorAttachmentFormats_,
                                 Format                                        depthAttachmentFormat_   = Format::eUndefined,
                                 Format                                        stencilAttachmentFormat_ = Format::eUndefined,
                                 const void *                                  pNext_                   = nullptr )
      : pNext( pNext_ )
      , viewMask( viewMask_ )
      , colorAttachmentCount( static_cast<uint32_t>( colorAttachmentFormats_.size() ) )
      , pColorAttachmentFormats( colorAttachmentFormats_.data() )
      , depthAttachmentFormat( depthAttachmentFormat_ )
      , stencilAttachmentFormat( stencilAttachmentFormat_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineRenderingCreateInfo & operator=( PipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-compatible reinterpretation).
    PipelineRenderingCreateInfo & operator=( VkPipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineRenderingCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
    {
      viewMask = viewMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount = colorAttachmentCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setPColorAttachmentFormats( const Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
    {
      pColorAttachmentFormats = pColorAttachmentFormats_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both colorAttachmentCount and pColorAttachmentFormats from one array proxy (non-owning).
    PipelineRenderingCreateInfo & setColorAttachmentFormats( ArrayProxyNoTemporaries<const Format> const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount    = static_cast<uint32_t>( colorAttachmentFormats_.size() );
      pColorAttachmentFormats = colorAttachmentFormats_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setDepthAttachmentFormat( Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      depthAttachmentFormat = depthAttachmentFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setStencilAttachmentFormat( Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilAttachmentFormat = stencilAttachmentFormat_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type, by reference and by pointer (no copy is made).
    operator VkPipelineRenderingCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRenderingCreateInfo *>( this );
    }

    operator VkPipelineRenderingCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRenderingCreateInfo *>( this );
    }

    operator VkPipelineRenderingCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineRenderingCreateInfo *>( this );
    }

    operator VkPipelineRenderingCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineRenderingCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references, for reflection-based comparison/hashing.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const Format * const &, Format const &, Format const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineRenderingCreateInfo const & ) const = default;
#else
    // Memberwise equality; pNext and pColorAttachmentFormats are compared as raw pointers,
    // not deep-compared.
    bool operator==( PipelineRenderingCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewMask == rhs.viewMask ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) &&
             ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) && ( depthAttachmentFormat == rhs.depthAttachmentFormat ) &&
             ( stencilAttachmentFormat == rhs.stencilAttachmentFormat );
#  endif
    }

    bool operator!=( PipelineRenderingCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPipelineRenderingCreateInfo exactly.
    StructureType  sType                   = StructureType::ePipelineRenderingCreateInfo;
    const void *   pNext                   = {};
    uint32_t       viewMask                = {};
    uint32_t       colorAttachmentCount    = {};
    const Format * pColorAttachmentFormats = {};
    Format         depthAttachmentFormat   = Format::eUndefined;
    Format         stencilAttachmentFormat = Format::eUndefined;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan struct type to its C++ wrapper (used by generic/structure-chain code).
  template <>
  struct CppType<VkPipelineRenderingCreateInfo>
  {
    using Type = PipelineRenderingCreateInfo;
  };
#endif

  // Maps the StructureType enumerant back to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePipelineRenderingCreateInfo>
  {
    using Type = PipelineRenderingCreateInfo;
  };

  // Alias for the promoted KHR extension name.
  using PipelineRenderingCreateInfoKHR = PipelineRenderingCreateInfo;

  // wrapper struct for struct VkPipelineRepresentativeFragmentTestStateCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRepresentativeFragmentTestStateCreateInfoNV.html
  struct PipelineRepresentativeFragmentTestStateCreateInfoNV
  {
    using NativeType = VkPipelineRepresentativeFragmentTestStateCreateInfoNV;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( Bool32       representativeFragmentTestEnable_ = {},
                                                                              const void * pNext_                            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , representativeFragmentTestEnable{ representativeFragmentTestEnable_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PipelineRepresentativeFragmentTestStateCreateInfoNV( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on this wrapper being layout-identical to NativeType.
    PipelineRepresentativeFragmentTestStateCreateInfoNV( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineRepresentativeFragmentTestStateCreateInfoNV( *reinterpret_cast<PipelineRepresentativeFragmentTestStateCreateInfoNV const *>( &rhs ) )
    {
    }

    PipelineRepresentativeFragmentTestStateCreateInfoNV &
      operator=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-compatible reinterpretation).
    PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineRepresentativeFragmentTestStateCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV &
      setRepresentativeFragmentTestEnable( Bool32 representativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      representativeFragmentTestEnable = representativeFragmentTestEnable_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type, by reference and by pointer (no copy is made).
    operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRepresentativeFragmentTestStateCreateInfoNV *>( this );
    }

    operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRepresentativeFragmentTestStateCreateInfoNV *>( this );
    }

    operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineRepresentativeFragmentTestStateCreateInfoNV *>( this );
    }

    operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineRepresentativeFragmentTestStateCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references, for reflection-based comparison/hashing.
    std::tuple<StructureType const &, const void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, representativeFragmentTestEnable );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineRepresentativeFragmentTestStateCreateInfoNV const & ) const = default;
#else
    // Memberwise equality; note that pNext is compared as a raw pointer, not deep-compared.
    bool operator==( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( representativeFragmentTestEnable == rhs.representativeFragmentTestEnable );
#  endif
    }

    bool operator!=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPipelineRepresentativeFragmentTestStateCreateInfoNV exactly.
    StructureType sType                            = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV;
    const void *  pNext                            = {};
    Bool32        representativeFragmentTestEnable = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan struct type to its C++ wrapper (used by generic/structure-chain code).
  template <>
  struct CppType<VkPipelineRepresentativeFragmentTestStateCreateInfoNV>
  {
    using Type = PipelineRepresentativeFragmentTestStateCreateInfoNV;
  };
#endif

  // Maps the StructureType enumerant back to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV>
  {
    using Type = PipelineRepresentativeFragmentTestStateCreateInfoNV;
  };

  // wrapper struct for struct VkPipelineRobustnessCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRobustnessCreateInfo.html
  struct PipelineRobustnessCreateInfo
  {
    using NativeType = VkPipelineRobustnessCreateInfo;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineRobustnessCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // All behaviors default to eDeviceDefault, i.e. defer to the device's configured robustness.
    VULKAN_HPP_CONSTEXPR PipelineRobustnessCreateInfo( PipelineRobustnessBufferBehavior storageBuffers_ = PipelineRobustnessBufferBehavior::eDeviceDefault,
                                                       PipelineRobustnessBufferBehavior uniformBuffers_ = PipelineRobustnessBufferBehavior::eDeviceDefault,
                                                       PipelineRobustnessBufferBehavior vertexInputs_   = PipelineRobustnessBufferBehavior::eDeviceDefault,
                                                       PipelineRobustnessImageBehavior  images_         = PipelineRobustnessImageBehavior::eDeviceDefault,
                                                       const void *                     pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , storageBuffers{ storageBuffers_ }
      , uniformBuffers{ uniformBuffers_ }
      , vertexInputs{ vertexInputs_ }
      , images{ images_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineRobustnessCreateInfo( PipelineRobustnessCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on this wrapper being layout-identical to NativeType.
    PipelineRobustnessCreateInfo( VkPipelineRobustnessCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineRobustnessCreateInfo( *reinterpret_cast<PipelineRobustnessCreateInfo const *>( &rhs ) )
    {
    }

    PipelineRobustnessCreateInfo & operator=( PipelineRobustnessCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-compatible reinterpretation).
    PipelineRobustnessCreateInfo & operator=( VkPipelineRobustnessCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineRobustnessCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfo & setStorageBuffers( PipelineRobustnessBufferBehavior storageBuffers_ ) VULKAN_HPP_NOEXCEPT
    {
      storageBuffers = storageBuffers_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfo & setUniformBuffers( PipelineRobustnessBufferBehavior uniformBuffers_ ) VULKAN_HPP_NOEXCEPT
    {
      uniformBuffers = uniformBuffers_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfo & setVertexInputs( PipelineRobustnessBufferBehavior vertexInputs_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexInputs = vertexInputs_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfo & setImages( PipelineRobustnessImageBehavior images_ ) VULKAN_HPP_NOEXCEPT
    {
      images = images_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type, by reference and by pointer (no copy is made).
    operator VkPipelineRobustnessCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRobustnessCreateInfo *>( this );
    }

    operator VkPipelineRobustnessCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRobustnessCreateInfo *>( this );
    }

    operator VkPipelineRobustnessCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineRobustnessCreateInfo *>( this );
    }

    operator VkPipelineRobustnessCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineRobustnessCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references, for reflection-based comparison/hashing.
    std::tuple<StructureType const &,
               const void * const &,
               PipelineRobustnessBufferBehavior const &,
               PipelineRobustnessBufferBehavior const &,
               PipelineRobustnessBufferBehavior const &,
               PipelineRobustnessImageBehavior const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, storageBuffers, uniformBuffers, vertexInputs, images );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineRobustnessCreateInfo const & ) const = default;
#else
    // Memberwise equality; note that pNext is compared as a raw pointer, not deep-compared.
    bool operator==( PipelineRobustnessCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffers == rhs.storageBuffers ) && ( uniformBuffers == rhs.uniformBuffers ) &&
             ( vertexInputs == rhs.vertexInputs ) && ( images == rhs.images );
#  endif
    }

    bool operator!=( PipelineRobustnessCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPipelineRobustnessCreateInfo exactly.
    StructureType                    sType          = StructureType::ePipelineRobustnessCreateInfo;
    const void *                     pNext          = {};
    PipelineRobustnessBufferBehavior storageBuffers = PipelineRobustnessBufferBehavior::eDeviceDefault;
    PipelineRobustnessBufferBehavior uniformBuffers = PipelineRobustnessBufferBehavior::eDeviceDefault;
    PipelineRobustnessBufferBehavior vertexInputs   = PipelineRobustnessBufferBehavior::eDeviceDefault;
    PipelineRobustnessImageBehavior  images         = PipelineRobustnessImageBehavior::eDeviceDefault;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan struct type to its C++ wrapper (used by generic/structure-chain code).
  template <>
  struct CppType<VkPipelineRobustnessCreateInfo>
  {
    using Type = PipelineRobustnessCreateInfo;
  };
#endif

  // Maps the StructureType enumerant back to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePipelineRobustnessCreateInfo>
  {
    using Type = PipelineRobustnessCreateInfo;
  };

  // Alias for the promoted EXT extension name.
  using PipelineRobustnessCreateInfoEXT = PipelineRobustnessCreateInfo;

  // wrapper struct for struct VkPipelineSampleLocationsStateCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineSampleLocationsStateCreateInfoEXT.html
  struct PipelineSampleLocationsStateCreateInfoEXT
  {
    using NativeType = VkPipelineSampleLocationsStateCreateInfoEXT;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineSampleLocationsStateCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( Bool32                 sampleLocationsEnable_ = {},
                                                                    SampleLocationsInfoEXT sampleLocationsInfo_   = {},
                                                                    const void *           pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , sampleLocationsEnable{ sampleLocationsEnable_ }
      , sampleLocationsInfo{ sampleLocationsInfo_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct; relies on this wrapper being layout-identical to NativeType.
    PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineSampleLocationsStateCreateInfoEXT( *reinterpret_cast<PipelineSampleLocationsStateCreateInfoEXT const *>( &rhs ) )
    {
    }

    PipelineSampleLocationsStateCreateInfoEXT & operator=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct (layout-compatible reinterpretation).
    PipelineSampleLocationsStateCreateInfoEXT & operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineSampleLocationsStateCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsEnable( Bool32 sampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationsEnable = sampleLocationsEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT &
      setSampleLocationsInfo( SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationsInfo = sampleLocationsInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type, by reference and by pointer (no copy is made).
    operator VkPipelineSampleLocationsStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineSampleLocationsStateCreateInfoEXT *>( this );
    }

    operator VkPipelineSampleLocationsStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineSampleLocationsStateCreateInfoEXT *>( this );
    }

    operator VkPipelineSampleLocationsStateCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineSampleLocationsStateCreateInfoEXT *>( this );
    }

    operator VkPipelineSampleLocationsStateCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineSampleLocationsStateCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references, for reflection-based comparison/hashing.
    std::tuple<StructureType const &, const void * const &, Bool32 const &, SampleLocationsInfoEXT const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, sampleLocationsEnable, sampleLocationsInfo );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineSampleLocationsStateCreateInfoEXT const & ) const = default;
#else
    // Memberwise equality; note that pNext is compared as a raw pointer, not deep-compared.
    bool operator==( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleLocationsEnable == rhs.sampleLocationsEnable ) &&
             ( sampleLocationsInfo == rhs.sampleLocationsInfo );
#  endif
    }

    bool operator!=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPipelineSampleLocationsStateCreateInfoEXT exactly.
    StructureType          sType                 = StructureType::ePipelineSampleLocationsStateCreateInfoEXT;
    const void *           pNext                 = {};
    Bool32                 sampleLocationsEnable = {};
    SampleLocationsInfoEXT sampleLocationsInfo   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan struct type to its C++ wrapper (used by generic/structure-chain code).
  template <>
  struct CppType<VkPipelineSampleLocationsStateCreateInfoEXT>
  {
    using Type = PipelineSampleLocationsStateCreateInfoEXT;
  };
#endif

  // Maps the StructureType enumerant back to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePipelineSampleLocationsStateCreateInfoEXT>
  {
    using Type = PipelineSampleLocationsStateCreateInfoEXT;
  };

  // wrapper struct for struct VkPipelineShaderStageModuleIdentifierCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineShaderStageModuleIdentifierCreateInfoEXT.html
  struct PipelineShaderStageModuleIdentifierCreateInfoEXT
  {
    using NativeType = VkPipelineShaderStageModuleIdentifierCreateInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineShaderStageModuleIdentifierCreateInfoEXT( uint32_t        identifierSize_ = {},
                                                                           const uint8_t * pIdentifier_    = {},
                                                                           const void *    pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , identifierSize{ identifierSize_ }
      , pIdentifier{ pIdentifier_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PipelineShaderStageModuleIdentifierCreateInfoEXT( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineShaderStageModuleIdentifierCreateInfoEXT( VkPipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineShaderStageModuleIdentifierCreateInfoEXT( *reinterpret_cast<PipelineShaderStageModuleIdentifierCreateInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineShaderStageModuleIdentifierCreateInfoEXT( ArrayProxyNoTemporaries<const uint8_t> const & identifier_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), identifierSize( static_cast<uint32_t>( identifier_.size() ) ), pIdentifier( identifier_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineShaderStageModuleIdentifierCreateInfoEXT & operator=( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PipelineShaderStageModuleIdentifierCreateInfoEXT & operator=( VkPipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineShaderStageModuleIdentifierCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable fluent setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageModuleIdentifierCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageModuleIdentifierCreateInfoEXT & setIdentifierSize( uint32_t identifierSize_ ) VULKAN_HPP_NOEXCEPT
    {
      identifierSize = identifierSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageModuleIdentifierCreateInfoEXT & setPIdentifier( const uint8_t * pIdentifier_ ) VULKAN_HPP_NOEXCEPT
    {
      pIdentifier = pIdentifier_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets identifierSize and pIdentifier together from one array proxy,
    // keeping the count consistent with the pointer.
    PipelineShaderStageModuleIdentifierCreateInfoEXT & setIdentifier( ArrayProxyNoTemporaries<const uint8_t> const & identifier_ ) VULKAN_HPP_NOEXCEPT
    {
      identifierSize = static_cast<uint32_t>( identifier_.size() );
      pIdentifier    = identifier_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (reference and pointer, const and non-const),
    // so the wrapper can be passed directly to the C API; all rely on identical layout.
    operator VkPipelineShaderStageModuleIdentifierCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineShaderStageModuleIdentifierCreateInfoEXT *>( this );
    }

    operator VkPipelineShaderStageModuleIdentifierCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineShaderStageModuleIdentifierCreateInfoEXT *>( this );
    }

    operator VkPipelineShaderStageModuleIdentifierCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineShaderStageModuleIdentifierCreateInfoEXT *>( this );
    }

    operator VkPipelineShaderStageModuleIdentifierCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineShaderStageModuleIdentifierCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references, in declaration order (used by generic comparison code).
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const uint8_t * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, identifierSize, pIdentifier );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineShaderStageModuleIdentifierCreateInfoEXT const & ) const = default;
#else
    bool operator==( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      // Note: pIdentifier is compared by address only, not by the identifierSize bytes it points to.
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( identifierSize == rhs.identifierSize ) && ( pIdentifier == rhs.pIdentifier );
#  endif
    }

    bool operator!=( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPipelineShaderStageModuleIdentifierCreateInfoEXT;
    // the reinterpret_cast-based conversions above depend on this layout.
    StructureType   sType          = StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT;
    const void *    pNext          = {};
    uint32_t        identifierSize = {};
    const uint8_t * pIdentifier    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper for CppType-based lookups (C++20 and newer only).
  template <>
  struct CppType<VkPipelineShaderStageModuleIdentifierCreateInfoEXT>
  {
    using Type = PipelineShaderStageModuleIdentifierCreateInfoEXT;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type (used e.g. for pNext-chain handling).
  template <>
  struct CppType<StructureType, StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT>
  {
    using Type = PipelineShaderStageModuleIdentifierCreateInfoEXT;
  };

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  // wrapper struct for struct VkPipelineShaderStageNodeCreateInfoAMDX, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineShaderStageNodeCreateInfoAMDX.html
  struct PipelineShaderStageNodeCreateInfoAMDX
  {
    using NativeType = VkPipelineShaderStageNodeCreateInfoAMDX;

    static const bool                                  allowDuplicate = false;
    // sType value identifying this structure when it appears in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineShaderStageNodeCreateInfoAMDX;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pName defaults to nullptr, which the comparison operators below handle explicitly.
    VULKAN_HPP_CONSTEXPR
      PipelineShaderStageNodeCreateInfoAMDX( const char * pName_ = {}, uint32_t index_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pName{ pName_ }
      , index{ index_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineShaderStageNodeCreateInfoAMDX( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct; relies on this wrapper being layout-compatible with
    // VkPipelineShaderStageNodeCreateInfoAMDX.
    PipelineShaderStageNodeCreateInfoAMDX( VkPipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineShaderStageNodeCreateInfoAMDX( *reinterpret_cast<PipelineShaderStageNodeCreateInfoAMDX const *>( &rhs ) )
    {
    }

    PipelineShaderStageNodeCreateInfoAMDX & operator=( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct; reinterprets rhs as the layout-compatible C++ wrapper and copy-assigns.
    PipelineShaderStageNodeCreateInfoAMDX & operator=( VkPipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineShaderStageNodeCreateInfoAMDX const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable fluent setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageNodeCreateInfoAMDX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageNodeCreateInfoAMDX & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT
    {
      pName = pName_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageNodeCreateInfoAMDX & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT
    {
      index = index_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct so the wrapper can be passed directly to the C API.
    operator VkPipelineShaderStageNodeCreateInfoAMDX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( this );
    }

    operator VkPipelineShaderStageNodeCreateInfoAMDX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineShaderStageNodeCreateInfoAMDX *>( this );
    }

    operator VkPipelineShaderStageNodeCreateInfoAMDX const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( this );
    }

    operator VkPipelineShaderStageNodeCreateInfoAMDX *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineShaderStageNodeCreateInfoAMDX *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references, in declaration order.
    std::tuple<StructureType const &, const void * const &, const char * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pName, index );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Manual three-way comparison: pName is compared by string content, not by pointer value.
    std::strong_ordering operator<=>( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( pName != rhs.pName )
      {
        // A null pName orders before any non-null one; strcmp must never be called with a
        // null pointer (undefined behavior), and pName defaults to nullptr.
        if ( !pName || !rhs.pName )
          return pName ? std::strong_ordering::greater : std::strong_ordering::less;
        if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 )
          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      }
      if ( auto cmp = index <=> rhs.index; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#  endif

    // Equality compares pName by string content when both pointers are non-null; the extra
    // null checks avoid calling strcmp with a null pointer (undefined behavior).
    bool operator==( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( ( pName == rhs.pName ) || ( pName && rhs.pName && ( strcmp( pName, rhs.pName ) == 0 ) ) ) && ( index == rhs.index );
    }

    bool operator!=( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Member order and types mirror VkPipelineShaderStageNodeCreateInfoAMDX; the
    // reinterpret_cast-based conversions above depend on this layout.
    StructureType sType = StructureType::ePipelineShaderStageNodeCreateInfoAMDX;
    const void *  pNext = {};
    const char *  pName = {};
    uint32_t      index = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper for CppType-based lookups (C++20 and newer only).
  template <>
  struct CppType<VkPipelineShaderStageNodeCreateInfoAMDX>
  {
    using Type = PipelineShaderStageNodeCreateInfoAMDX;
  };
#  endif

  // Maps the StructureType enumerant to its C++ wrapper type (used e.g. for pNext-chain handling).
  template <>
  struct CppType<StructureType, StructureType::ePipelineShaderStageNodeCreateInfoAMDX>
  {
    using Type = PipelineShaderStageNodeCreateInfoAMDX;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  // wrapper struct for struct VkPipelineShaderStageRequiredSubgroupSizeCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineShaderStageRequiredSubgroupSizeCreateInfo.html
  struct PipelineShaderStageRequiredSubgroupSizeCreateInfo
  {
    using NativeType = VkPipelineShaderStageRequiredSubgroupSizeCreateInfo;

    static const bool                                  allowDuplicate = false;
    // sType value identifying this structure when it appears in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext_ comes last so the payload field can be passed positionally.
    VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfo( uint32_t     requiredSubgroupSize_ = {},
                                                                            const void * pNext_                = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , requiredSubgroupSize{ requiredSubgroupSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PipelineShaderStageRequiredSubgroupSizeCreateInfo( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct; relies on this wrapper being layout-compatible with
    // VkPipelineShaderStageRequiredSubgroupSizeCreateInfo.
    PipelineShaderStageRequiredSubgroupSizeCreateInfo( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineShaderStageRequiredSubgroupSizeCreateInfo( *reinterpret_cast<PipelineShaderStageRequiredSubgroupSizeCreateInfo const *>( &rhs ) )
    {
    }

    PipelineShaderStageRequiredSubgroupSizeCreateInfo &
      operator=( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct; reinterprets rhs as the layout-compatible C++ wrapper and copy-assigns.
    PipelineShaderStageRequiredSubgroupSizeCreateInfo & operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineShaderStageRequiredSubgroupSizeCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable fluent setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageRequiredSubgroupSizeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageRequiredSubgroupSizeCreateInfo & setRequiredSubgroupSize( uint32_t requiredSubgroupSize_ ) VULKAN_HPP_NOEXCEPT
    {
      requiredSubgroupSize = requiredSubgroupSize_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct so the wrapper can be passed directly to the C API.
    operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineShaderStageRequiredSubgroupSizeCreateInfo *>( this );
    }

    operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineShaderStageRequiredSubgroupSizeCreateInfo *>( this );
    }

    operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineShaderStageRequiredSubgroupSizeCreateInfo *>( this );
    }

    operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineShaderStageRequiredSubgroupSizeCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references, in declaration order.
    std::tuple<StructureType const &, const void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, requiredSubgroupSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & ) const = default;
#else
    bool operator==( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( requiredSubgroupSize == rhs.requiredSubgroupSize );
#  endif
    }

    bool operator!=( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPipelineShaderStageRequiredSubgroupSizeCreateInfo; the
    // reinterpret_cast-based conversions above depend on this layout.
    StructureType sType                = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo;
    const void *  pNext                = {};
    uint32_t      requiredSubgroupSize = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper for CppType-based lookups (C++20 and newer only).
  template <>
  struct CppType<VkPipelineShaderStageRequiredSubgroupSizeCreateInfo>
  {
    using Type = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type (used e.g. for pNext-chain handling).
  template <>
  struct CppType<StructureType, StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo>
  {
    using Type = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
  };

  using PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
  using ShaderRequiredSubgroupSizeCreateInfoEXT              = PipelineShaderStageRequiredSubgroupSizeCreateInfo;

  // wrapper struct for struct VkPipelineTessellationDomainOriginStateCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineTessellationDomainOriginStateCreateInfo.html
  struct PipelineTessellationDomainOriginStateCreateInfo
  {
    using NativeType = VkPipelineTessellationDomainOriginStateCreateInfo;

    static const bool                                  allowDuplicate = false;
    // sType value identifying this structure when it appears in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; domainOrigin defaults to eUpperLeft, matching the member initializer below.
    VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( TessellationDomainOrigin domainOrigin_ = TessellationDomainOrigin::eUpperLeft,
                                                                          const void *             pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , domainOrigin{ domainOrigin_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PipelineTessellationDomainOriginStateCreateInfo( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct; relies on this wrapper being layout-compatible with
    // VkPipelineTessellationDomainOriginStateCreateInfo.
    PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineTessellationDomainOriginStateCreateInfo( *reinterpret_cast<PipelineTessellationDomainOriginStateCreateInfo const *>( &rhs ) )
    {
    }

    PipelineTessellationDomainOriginStateCreateInfo & operator=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct; reinterprets rhs as the layout-compatible C++ wrapper and copy-assigns.
    PipelineTessellationDomainOriginStateCreateInfo & operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineTessellationDomainOriginStateCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable fluent setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo & setDomainOrigin( TessellationDomainOrigin domainOrigin_ ) VULKAN_HPP_NOEXCEPT
    {
      domainOrigin = domainOrigin_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct so the wrapper can be passed directly to the C API.
    operator VkPipelineTessellationDomainOriginStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineTessellationDomainOriginStateCreateInfo *>( this );
    }

    operator VkPipelineTessellationDomainOriginStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineTessellationDomainOriginStateCreateInfo *>( this );
    }

    operator VkPipelineTessellationDomainOriginStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineTessellationDomainOriginStateCreateInfo *>( this );
    }

    operator VkPipelineTessellationDomainOriginStateCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineTessellationDomainOriginStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references, in declaration order.
    std::tuple<StructureType const &, const void * const &, TessellationDomainOrigin const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, domainOrigin );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineTessellationDomainOriginStateCreateInfo const & ) const = default;
#else
    bool operator==( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( domainOrigin == rhs.domainOrigin );
#  endif
    }

    bool operator!=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPipelineTessellationDomainOriginStateCreateInfo; the
    // reinterpret_cast-based conversions above depend on this layout.
    StructureType            sType        = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
    const void *             pNext        = {};
    TessellationDomainOrigin domainOrigin = TessellationDomainOrigin::eUpperLeft;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper for CppType-based lookups (C++20 and newer only).
  template <>
  struct CppType<VkPipelineTessellationDomainOriginStateCreateInfo>
  {
    using Type = PipelineTessellationDomainOriginStateCreateInfo;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type (used e.g. for pNext-chain handling).
  template <>
  struct CppType<StructureType, StructureType::ePipelineTessellationDomainOriginStateCreateInfo>
  {
    using Type = PipelineTessellationDomainOriginStateCreateInfo;
  };

  using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo;

  // wrapper struct for struct VkVertexInputBindingDivisorDescription, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVertexInputBindingDivisorDescription.html
  struct VertexInputBindingDivisorDescription
  {
    using NativeType = VkVertexInputBindingDivisorDescription;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor.
    VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescription( uint32_t binding_ = {}, uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT
      : binding( binding_ )
      , divisor( divisor_ )
    {
    }

    VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescription( VertexInputBindingDivisorDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct: copy the fields one by one (they correspond exactly).
    VertexInputBindingDivisorDescription( VkVertexInputBindingDivisorDescription const & rhs ) VULKAN_HPP_NOEXCEPT
      : binding( rhs.binding )
      , divisor( rhs.divisor )
    {
    }

    VertexInputBindingDivisorDescription & operator=( VertexInputBindingDivisorDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct: member-wise copy of the corresponding fields.
    VertexInputBindingDivisorDescription & operator=( VkVertexInputBindingDivisorDescription const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      binding = rhs.binding;
      divisor = rhs.divisor;
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable fluent setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
    {
      this->binding = binding_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescription & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT
    {
      this->divisor = divisor_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (reference and pointer, const and non-const);
    // valid because this wrapper has the same layout as VkVertexInputBindingDivisorDescription.
    operator VkVertexInputBindingDivisorDescription const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVertexInputBindingDivisorDescription *>( this );
    }

    operator VkVertexInputBindingDivisorDescription &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVertexInputBindingDivisorDescription *>( this );
    }

    operator VkVertexInputBindingDivisorDescription const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVertexInputBindingDivisorDescription *>( this );
    }

    operator VkVertexInputBindingDivisorDescription *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVertexInputBindingDivisorDescription *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes both members as a tuple of const references, in declaration order.
    std::tuple<uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( binding, divisor );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VertexInputBindingDivisorDescription const & ) const = default;
#else
    bool operator==( VertexInputBindingDivisorDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return binding == rhs.binding && divisor == rhs.divisor;
#  endif
    }

    bool operator!=( VertexInputBindingDivisorDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    // Field order and types mirror VkVertexInputBindingDivisorDescription.
    uint32_t binding = {};
    uint32_t divisor = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper for CppType-based lookups (C++20 and newer only).
  template <>
  struct CppType<VkVertexInputBindingDivisorDescription>
  {
    using Type = VertexInputBindingDivisorDescription;
  };
#endif
  using VertexInputBindingDivisorDescriptionEXT = VertexInputBindingDivisorDescription;
  using VertexInputBindingDivisorDescriptionKHR = VertexInputBindingDivisorDescription;

  // wrapper struct for struct VkPipelineVertexInputDivisorStateCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineVertexInputDivisorStateCreateInfo.html
  struct PipelineVertexInputDivisorStateCreateInfo
  {
    using NativeType = VkPipelineVertexInputDivisorStateCreateInfo;

    static const bool                                  allowDuplicate = false;
    // sType value identifying this structure when it appears in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineVertexInputDivisorStateCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext_ comes last so the payload fields can be passed positionally.
    VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfo( uint32_t                                     vertexBindingDivisorCount_ = {},
                                                                    const VertexInputBindingDivisorDescription * pVertexBindingDivisors_    = {},
                                                                    const void *                                 pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , vertexBindingDivisorCount{ vertexBindingDivisorCount_ }
      , pVertexBindingDivisors{ pVertexBindingDivisors_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfo( PipelineVertexInputDivisorStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct; relies on this wrapper being layout-compatible with
    // VkPipelineVertexInputDivisorStateCreateInfo.
    PipelineVertexInputDivisorStateCreateInfo( VkPipelineVertexInputDivisorStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineVertexInputDivisorStateCreateInfo( *reinterpret_cast<PipelineVertexInputDivisorStateCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives count and pointer from one array proxy.
    // Note: only the pointer is stored; the referenced array must outlive this struct.
    PipelineVertexInputDivisorStateCreateInfo( ArrayProxyNoTemporaries<const VertexInputBindingDivisorDescription> const & vertexBindingDivisors_,
                                               const void *                                                                pNext_ = nullptr )
      : pNext( pNext_ )
      , vertexBindingDivisorCount( static_cast<uint32_t>( vertexBindingDivisors_.size() ) )
      , pVertexBindingDivisors( vertexBindingDivisors_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineVertexInputDivisorStateCreateInfo & operator=( PipelineVertexInputDivisorStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct; reinterprets rhs as the layout-compatible C++ wrapper and copy-assigns.
    PipelineVertexInputDivisorStateCreateInfo & operator=( VkPipelineVertexInputDivisorStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineVertexInputDivisorStateCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable fluent setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfo & setVertexBindingDivisorCount( uint32_t vertexBindingDivisorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexBindingDivisorCount = vertexBindingDivisorCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfo &
      setPVertexBindingDivisors( const VertexInputBindingDivisorDescription * pVertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT
    {
      pVertexBindingDivisors = pVertexBindingDivisors_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets count and pointer together from one array proxy, keeping them consistent.
    PipelineVertexInputDivisorStateCreateInfo &
      setVertexBindingDivisors( ArrayProxyNoTemporaries<const VertexInputBindingDivisorDescription> const & vertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexBindingDivisorCount = static_cast<uint32_t>( vertexBindingDivisors_.size() );
      pVertexBindingDivisors    = vertexBindingDivisors_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct so the wrapper can be passed directly to the C API.
    operator VkPipelineVertexInputDivisorStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineVertexInputDivisorStateCreateInfo *>( this );
    }

    operator VkPipelineVertexInputDivisorStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineVertexInputDivisorStateCreateInfo *>( this );
    }

    operator VkPipelineVertexInputDivisorStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineVertexInputDivisorStateCreateInfo *>( this );
    }

    operator VkPipelineVertexInputDivisorStateCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineVertexInputDivisorStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of const references, in declaration order.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const VertexInputBindingDivisorDescription * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, vertexBindingDivisorCount, pVertexBindingDivisors );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineVertexInputDivisorStateCreateInfo const & ) const = default;
#else
    bool operator==( PipelineVertexInputDivisorStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      // Note: pVertexBindingDivisors is compared by address only, not by the array contents.
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexBindingDivisorCount == rhs.vertexBindingDivisorCount ) &&
             ( pVertexBindingDivisors == rhs.pVertexBindingDivisors );
#  endif
    }

    bool operator!=( PipelineVertexInputDivisorStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkPipelineVertexInputDivisorStateCreateInfo; the
    // reinterpret_cast-based conversions above depend on this layout.
    StructureType                                sType                     = StructureType::ePipelineVertexInputDivisorStateCreateInfo;
    const void *                                 pNext                     = {};
    uint32_t                                     vertexBindingDivisorCount = {};
    const VertexInputBindingDivisorDescription * pVertexBindingDivisors    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type to its C++ wrapper for CppType-based lookups (C++20 and newer only).
  template <>
  struct CppType<VkPipelineVertexInputDivisorStateCreateInfo>
  {
    using Type = PipelineVertexInputDivisorStateCreateInfo;
  };
#endif

  // Maps the StructureType enumerant to its C++ wrapper type (used e.g. for pNext-chain handling).
  template <>
  struct CppType<StructureType, StructureType::ePipelineVertexInputDivisorStateCreateInfo>
  {
    using Type = PipelineVertexInputDivisorStateCreateInfo;
  };

  using PipelineVertexInputDivisorStateCreateInfoEXT = PipelineVertexInputDivisorStateCreateInfo;
  using PipelineVertexInputDivisorStateCreateInfoKHR = PipelineVertexInputDivisorStateCreateInfo;

  // wrapper struct for struct VkPipelineViewportCoarseSampleOrderStateCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportCoarseSampleOrderStateCreateInfoNV.html
  struct PipelineViewportCoarseSampleOrderStateCreateInfoNV
  {
    using NativeType = VkPipelineViewportCoarseSampleOrderStateCreateInfoNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( CoarseSampleOrderTypeNV sampleOrderType_ = CoarseSampleOrderTypeNV::eDefault,
                                                                             uint32_t                customSampleOrderCount_        = {},
                                                                             const CoarseSampleOrderCustomNV * pCustomSampleOrders_ = {},
                                                                             const void *                      pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , sampleOrderType{ sampleOrderType_ }
      , customSampleOrderCount{ customSampleOrderCount_ }
      , pCustomSampleOrders{ pCustomSampleOrders_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PipelineViewportCoarseSampleOrderStateCreateInfoNV( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct (wrapper and native type share the same memory layout).
    PipelineViewportCoarseSampleOrderStateCreateInfoNV( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineViewportCoarseSampleOrderStateCreateInfoNV( *reinterpret_cast<PipelineViewportCoarseSampleOrderStateCreateInfoNV const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: customSampleOrderCount is deduced from the size of the array proxy.
    PipelineViewportCoarseSampleOrderStateCreateInfoNV( CoarseSampleOrderTypeNV                                          sampleOrderType_,
                                                        ArrayProxyNoTemporaries<const CoarseSampleOrderCustomNV> const & customSampleOrders_,
                                                        const void *                                                     pNext_ = nullptr )
      : pNext( pNext_ )
      , sampleOrderType( sampleOrderType_ )
      , customSampleOrderCount( static_cast<uint32_t>( customSampleOrders_.size() ) )
      , pCustomSampleOrders( customSampleOrders_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineViewportCoarseSampleOrderStateCreateInfoNV &
      operator=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct via the layout-compatible wrapper.
    PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineViewportCoarseSampleOrderStateCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV &
      setSampleOrderType( CoarseSampleOrderTypeNV sampleOrderType_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleOrderType = sampleOrderType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV &
      setCustomSampleOrderCount( uint32_t customSampleOrderCount_ ) VULKAN_HPP_NOEXCEPT
    {
      customSampleOrderCount = customSampleOrderCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV &
      setPCustomSampleOrders( const CoarseSampleOrderCustomNV * pCustomSampleOrders_ ) VULKAN_HPP_NOEXCEPT
    {
      pCustomSampleOrders = pCustomSampleOrders_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both the count and the pointer from a single array proxy, keeping them consistent.
    PipelineViewportCoarseSampleOrderStateCreateInfoNV &
      setCustomSampleOrders( ArrayProxyNoTemporaries<const CoarseSampleOrderCustomNV> const & customSampleOrders_ ) VULKAN_HPP_NOEXCEPT
    {
      customSampleOrderCount = static_cast<uint32_t>( customSampleOrders_.size() );
      pCustomSampleOrders    = customSampleOrders_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type (reference and pointer variants).
    operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineViewportCoarseSampleOrderStateCreateInfoNV *>( this );
    }

    operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineViewportCoarseSampleOrderStateCreateInfoNV *>( this );
    }

    operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineViewportCoarseSampleOrderStateCreateInfoNV *>( this );
    }

    operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineViewportCoarseSampleOrderStateCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic (reflection-style) code.
    std::tuple<StructureType const &, const void * const &, CoarseSampleOrderTypeNV const &, uint32_t const &, const CoarseSampleOrderCustomNV * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, sampleOrderType, customSampleOrderCount, pCustomSampleOrders );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & ) const = default;
#else
    // NOTE: pointer members (pNext, pCustomSampleOrders) are compared by address, not by pointed-to contents.
    bool operator==( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleOrderType == rhs.sampleOrderType ) &&
             ( customSampleOrderCount == rhs.customSampleOrderCount ) && ( pCustomSampleOrders == rhs.pCustomSampleOrders );
#  endif
    }

    bool operator!=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native VkPipelineViewportCoarseSampleOrderStateCreateInfoNV layout.
    StructureType                     sType                  = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV;
    const void *                      pNext                  = {};
    CoarseSampleOrderTypeNV           sampleOrderType        = CoarseSampleOrderTypeNV::eDefault;
    uint32_t                          customSampleOrderCount = {};
    const CoarseSampleOrderCustomNV * pCustomSampleOrders    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan type back to its C++ wrapper type.
  template <>
  struct CppType<VkPipelineViewportCoarseSampleOrderStateCreateInfoNV>
  {
    using Type = PipelineViewportCoarseSampleOrderStateCreateInfoNV;
  };
#endif

  // Maps the StructureType enum value to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV>
  {
    using Type = PipelineViewportCoarseSampleOrderStateCreateInfoNV;
  };

  // wrapper struct for struct VkPipelineViewportDepthClampControlCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportDepthClampControlCreateInfoEXT.html
  struct PipelineViewportDepthClampControlCreateInfoEXT
  {
    using NativeType = VkPipelineViewportDepthClampControlCreateInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineViewportDepthClampControlCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineViewportDepthClampControlCreateInfoEXT( DepthClampModeEXT          depthClampMode_   = DepthClampModeEXT::eViewportRange,
                                                                         const DepthClampRangeEXT * pDepthClampRange_ = {},
                                                                         const void *               pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , depthClampMode{ depthClampMode_ }
      , pDepthClampRange{ pDepthClampRange_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PipelineViewportDepthClampControlCreateInfoEXT( PipelineViewportDepthClampControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct (wrapper and native type share the same memory layout).
    PipelineViewportDepthClampControlCreateInfoEXT( VkPipelineViewportDepthClampControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineViewportDepthClampControlCreateInfoEXT( *reinterpret_cast<PipelineViewportDepthClampControlCreateInfoEXT const *>( &rhs ) )
    {
    }

    PipelineViewportDepthClampControlCreateInfoEXT & operator=( PipelineViewportDepthClampControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct via the layout-compatible wrapper.
    PipelineViewportDepthClampControlCreateInfoEXT & operator=( VkPipelineViewportDepthClampControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineViewportDepthClampControlCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClampControlCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClampControlCreateInfoEXT & setDepthClampMode( DepthClampModeEXT depthClampMode_ ) VULKAN_HPP_NOEXCEPT
    {
      depthClampMode = depthClampMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClampControlCreateInfoEXT &
      setPDepthClampRange( const DepthClampRangeEXT * pDepthClampRange_ ) VULKAN_HPP_NOEXCEPT
    {
      pDepthClampRange = pDepthClampRange_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type (reference and pointer variants).
    operator VkPipelineViewportDepthClampControlCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineViewportDepthClampControlCreateInfoEXT *>( this );
    }

    operator VkPipelineViewportDepthClampControlCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineViewportDepthClampControlCreateInfoEXT *>( this );
    }

    operator VkPipelineViewportDepthClampControlCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineViewportDepthClampControlCreateInfoEXT *>( this );
    }

    operator VkPipelineViewportDepthClampControlCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineViewportDepthClampControlCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic (reflection-style) code.
    std::tuple<StructureType const &, const void * const &, DepthClampModeEXT const &, const DepthClampRangeEXT * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, depthClampMode, pDepthClampRange );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineViewportDepthClampControlCreateInfoEXT const & ) const = default;
#else
    // NOTE: pointer members (pNext, pDepthClampRange) are compared by address, not by pointed-to contents.
    bool operator==( PipelineViewportDepthClampControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClampMode == rhs.depthClampMode ) && ( pDepthClampRange == rhs.pDepthClampRange );
#  endif
    }

    bool operator!=( PipelineViewportDepthClampControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native VkPipelineViewportDepthClampControlCreateInfoEXT layout.
    StructureType              sType            = StructureType::ePipelineViewportDepthClampControlCreateInfoEXT;
    const void *               pNext            = {};
    DepthClampModeEXT          depthClampMode   = DepthClampModeEXT::eViewportRange;
    const DepthClampRangeEXT * pDepthClampRange = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan type back to its C++ wrapper type.
  template <>
  struct CppType<VkPipelineViewportDepthClampControlCreateInfoEXT>
  {
    using Type = PipelineViewportDepthClampControlCreateInfoEXT;
  };
#endif

  // Maps the StructureType enum value to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePipelineViewportDepthClampControlCreateInfoEXT>
  {
    using Type = PipelineViewportDepthClampControlCreateInfoEXT;
  };

  // wrapper struct for struct VkPipelineViewportDepthClipControlCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportDepthClipControlCreateInfoEXT.html
  struct PipelineViewportDepthClipControlCreateInfoEXT
  {
    using NativeType = VkPipelineViewportDepthClipControlCreateInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineViewportDepthClipControlCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineViewportDepthClipControlCreateInfoEXT( Bool32 negativeOneToOne_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , negativeOneToOne{ negativeOneToOne_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PipelineViewportDepthClipControlCreateInfoEXT( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct (wrapper and native type share the same memory layout).
    PipelineViewportDepthClipControlCreateInfoEXT( VkPipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineViewportDepthClipControlCreateInfoEXT( *reinterpret_cast<PipelineViewportDepthClipControlCreateInfoEXT const *>( &rhs ) )
    {
    }

    PipelineViewportDepthClipControlCreateInfoEXT & operator=( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct via the layout-compatible wrapper.
    PipelineViewportDepthClipControlCreateInfoEXT & operator=( VkPipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineViewportDepthClipControlCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClipControlCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClipControlCreateInfoEXT & setNegativeOneToOne( Bool32 negativeOneToOne_ ) VULKAN_HPP_NOEXCEPT
    {
      negativeOneToOne = negativeOneToOne_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type (reference and pointer variants).
    operator VkPipelineViewportDepthClipControlCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineViewportDepthClipControlCreateInfoEXT *>( this );
    }

    operator VkPipelineViewportDepthClipControlCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineViewportDepthClipControlCreateInfoEXT *>( this );
    }

    operator VkPipelineViewportDepthClipControlCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineViewportDepthClipControlCreateInfoEXT *>( this );
    }

    operator VkPipelineViewportDepthClipControlCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineViewportDepthClipControlCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic (reflection-style) code.
    std::tuple<StructureType const &, const void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, negativeOneToOne );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineViewportDepthClipControlCreateInfoEXT const & ) const = default;
#else
    // NOTE: pNext is compared by address, not by pointed-to contents.
    bool operator==( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( negativeOneToOne == rhs.negativeOneToOne );
#  endif
    }

    bool operator!=( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native VkPipelineViewportDepthClipControlCreateInfoEXT layout.
    StructureType sType            = StructureType::ePipelineViewportDepthClipControlCreateInfoEXT;
    const void *  pNext            = {};
    Bool32        negativeOneToOne = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan type back to its C++ wrapper type.
  template <>
  struct CppType<VkPipelineViewportDepthClipControlCreateInfoEXT>
  {
    using Type = PipelineViewportDepthClipControlCreateInfoEXT;
  };
#endif

  // Maps the StructureType enum value to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePipelineViewportDepthClipControlCreateInfoEXT>
  {
    using Type = PipelineViewportDepthClipControlCreateInfoEXT;
  };

  // wrapper struct for struct VkPipelineViewportExclusiveScissorStateCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportExclusiveScissorStateCreateInfoNV.html
  struct PipelineViewportExclusiveScissorStateCreateInfoNV
  {
    using NativeType = VkPipelineViewportExclusiveScissorStateCreateInfoNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( uint32_t       exclusiveScissorCount_ = {},
                                                                            const Rect2D * pExclusiveScissors_    = {},
                                                                            const void *   pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , exclusiveScissorCount{ exclusiveScissorCount_ }
      , pExclusiveScissors{ pExclusiveScissors_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PipelineViewportExclusiveScissorStateCreateInfoNV( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct (wrapper and native type share the same memory layout).
    PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineViewportExclusiveScissorStateCreateInfoNV( *reinterpret_cast<PipelineViewportExclusiveScissorStateCreateInfoNV const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: exclusiveScissorCount is deduced from the size of the array proxy.
    PipelineViewportExclusiveScissorStateCreateInfoNV( ArrayProxyNoTemporaries<const Rect2D> const & exclusiveScissors_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), exclusiveScissorCount( static_cast<uint32_t>( exclusiveScissors_.size() ) ), pExclusiveScissors( exclusiveScissors_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineViewportExclusiveScissorStateCreateInfoNV &
      operator=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct via the layout-compatible wrapper.
    PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineViewportExclusiveScissorStateCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissorCount( uint32_t exclusiveScissorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      exclusiveScissorCount = exclusiveScissorCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & setPExclusiveScissors( const Rect2D * pExclusiveScissors_ ) VULKAN_HPP_NOEXCEPT
    {
      pExclusiveScissors = pExclusiveScissors_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both the count and the pointer from a single array proxy, keeping them consistent.
    PipelineViewportExclusiveScissorStateCreateInfoNV &
      setExclusiveScissors( ArrayProxyNoTemporaries<const Rect2D> const & exclusiveScissors_ ) VULKAN_HPP_NOEXCEPT
    {
      exclusiveScissorCount = static_cast<uint32_t>( exclusiveScissors_.size() );
      pExclusiveScissors    = exclusiveScissors_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type (reference and pointer variants).
    operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineViewportExclusiveScissorStateCreateInfoNV *>( this );
    }

    operator VkPipelineViewportExclusiveScissorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineViewportExclusiveScissorStateCreateInfoNV *>( this );
    }

    operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineViewportExclusiveScissorStateCreateInfoNV *>( this );
    }

    operator VkPipelineViewportExclusiveScissorStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineViewportExclusiveScissorStateCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic (reflection-style) code.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const Rect2D * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, exclusiveScissorCount, pExclusiveScissors );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineViewportExclusiveScissorStateCreateInfoNV const & ) const = default;
#else
    // NOTE: pointer members (pNext, pExclusiveScissors) are compared by address, not by pointed-to contents.
    bool operator==( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exclusiveScissorCount == rhs.exclusiveScissorCount ) &&
             ( pExclusiveScissors == rhs.pExclusiveScissors );
#  endif
    }

    bool operator!=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native VkPipelineViewportExclusiveScissorStateCreateInfoNV layout.
    StructureType  sType                 = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV;
    const void *   pNext                 = {};
    uint32_t       exclusiveScissorCount = {};
    const Rect2D * pExclusiveScissors    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan type back to its C++ wrapper type.
  template <>
  struct CppType<VkPipelineViewportExclusiveScissorStateCreateInfoNV>
  {
    using Type = PipelineViewportExclusiveScissorStateCreateInfoNV;
  };
#endif

  // Maps the StructureType enum value to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV>
  {
    using Type = PipelineViewportExclusiveScissorStateCreateInfoNV;
  };

  // wrapper struct for struct VkShadingRatePaletteNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShadingRatePaletteNV.html
  struct ShadingRatePaletteNV
  {
    using NativeType = VkShadingRatePaletteNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( uint32_t                          shadingRatePaletteEntryCount_ = {},
                                               const ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_   = {} ) VULKAN_HPP_NOEXCEPT
      : shadingRatePaletteEntryCount{ shadingRatePaletteEntryCount_ }
      , pShadingRatePaletteEntries{ pShadingRatePaletteEntries_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct (wrapper and native type share the same memory layout).
    ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ShadingRatePaletteNV( *reinterpret_cast<ShadingRatePaletteNV const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: shadingRatePaletteEntryCount is deduced from the size of the array proxy.
    ShadingRatePaletteNV( ArrayProxyNoTemporaries<const ShadingRatePaletteEntryNV> const & shadingRatePaletteEntries_ )
      : shadingRatePaletteEntryCount( static_cast<uint32_t>( shadingRatePaletteEntries_.size() ) )
      , pShadingRatePaletteEntries( shadingRatePaletteEntries_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    ShadingRatePaletteNV & operator=( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct via the layout-compatible wrapper.
    ShadingRatePaletteNV & operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ShadingRatePaletteNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV &
      setPShadingRatePaletteEntries( const ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      pShadingRatePaletteEntries = pShadingRatePaletteEntries_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both the count and the pointer from a single array proxy, keeping them consistent.
    ShadingRatePaletteNV &
      setShadingRatePaletteEntries( ArrayProxyNoTemporaries<const ShadingRatePaletteEntryNV> const & shadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      shadingRatePaletteEntryCount = static_cast<uint32_t>( shadingRatePaletteEntries_.size() );
      pShadingRatePaletteEntries   = shadingRatePaletteEntries_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type (reference and pointer variants).
    operator VkShadingRatePaletteNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkShadingRatePaletteNV *>( this );
    }

    operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkShadingRatePaletteNV *>( this );
    }

    operator VkShadingRatePaletteNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkShadingRatePaletteNV *>( this );
    }

    operator VkShadingRatePaletteNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkShadingRatePaletteNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic (reflection-style) code.
    std::tuple<uint32_t const &, const ShadingRatePaletteEntryNV * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( shadingRatePaletteEntryCount, pShadingRatePaletteEntries );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ShadingRatePaletteNV const & ) const = default;
#else
    // NOTE: pShadingRatePaletteEntries is compared by address, not by pointed-to contents.
    bool operator==( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount ) && ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries );
#  endif
    }

    bool operator!=( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native VkShadingRatePaletteNV layout (no sType/pNext: this is not an extensible struct).
    uint32_t                          shadingRatePaletteEntryCount = {};
    const ShadingRatePaletteEntryNV * pShadingRatePaletteEntries   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan type back to its C++ wrapper type.
  template <>
  struct CppType<VkShadingRatePaletteNV>
  {
    using Type = ShadingRatePaletteNV;
  };
#endif

  // wrapper struct for struct VkPipelineViewportShadingRateImageStateCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportShadingRateImageStateCreateInfoNV.html
  struct PipelineViewportShadingRateImageStateCreateInfoNV
  {
    using NativeType = VkPipelineViewportShadingRateImageStateCreateInfoNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( Bool32                       shadingRateImageEnable_ = {},
                                                                            uint32_t                     viewportCount_          = {},
                                                                            const ShadingRatePaletteNV * pShadingRatePalettes_   = {},
                                                                            const void *                 pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , shadingRateImageEnable{ shadingRateImageEnable_ }
      , viewportCount{ viewportCount_ }
      , pShadingRatePalettes{ pShadingRatePalettes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      PipelineViewportShadingRateImageStateCreateInfoNV( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native Vulkan struct (wrapper and native type share the same memory layout).
    PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineViewportShadingRateImageStateCreateInfoNV( *reinterpret_cast<PipelineViewportShadingRateImageStateCreateInfoNV const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: viewportCount is deduced from the size of the array proxy.
    PipelineViewportShadingRateImageStateCreateInfoNV( Bool32                                                      shadingRateImageEnable_,
                                                       ArrayProxyNoTemporaries<const ShadingRatePaletteNV> const & shadingRatePalettes_,
                                                       const void *                                                pNext_ = nullptr )
      : pNext( pNext_ )
      , shadingRateImageEnable( shadingRateImageEnable_ )
      , viewportCount( static_cast<uint32_t>( shadingRatePalettes_.size() ) )
      , pShadingRatePalettes( shadingRatePalettes_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineViewportShadingRateImageStateCreateInfoNV &
      operator=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native Vulkan struct via the layout-compatible wrapper.
    PipelineViewportShadingRateImageStateCreateInfoNV & operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineViewportShadingRateImageStateCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRateImageEnable( Bool32 shadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      shadingRateImageEnable = shadingRateImageEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
    {
      viewportCount = viewportCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV &
      setPShadingRatePalettes( const ShadingRatePaletteNV * pShadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
    {
      pShadingRatePalettes = pShadingRatePalettes_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both the count and the pointer from a single array proxy, keeping them consistent.
    PipelineViewportShadingRateImageStateCreateInfoNV &
      setShadingRatePalettes( ArrayProxyNoTemporaries<const ShadingRatePaletteNV> const & shadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
    {
      viewportCount        = static_cast<uint32_t>( shadingRatePalettes_.size() );
      pShadingRatePalettes = shadingRatePalettes_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan type (reference and pointer variants).
    operator VkPipelineViewportShadingRateImageStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineViewportShadingRateImageStateCreateInfoNV *>( this );
    }

    operator VkPipelineViewportShadingRateImageStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineViewportShadingRateImageStateCreateInfoNV *>( this );
    }

    operator VkPipelineViewportShadingRateImageStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineViewportShadingRateImageStateCreateInfoNV *>( this );
    }

    operator VkPipelineViewportShadingRateImageStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineViewportShadingRateImageStateCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic (reflection-style) code.
    std::tuple<StructureType const &, const void * const &, Bool32 const &, uint32_t const &, const ShadingRatePaletteNV * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shadingRateImageEnable, viewportCount, pShadingRatePalettes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineViewportShadingRateImageStateCreateInfoNV const & ) const = default;
#else
    // NOTE: pointer members (pNext, pShadingRatePalettes) are compared by address, not by pointed-to contents.
    bool operator==( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateImageEnable == rhs.shadingRateImageEnable ) &&
             ( viewportCount == rhs.viewportCount ) && ( pShadingRatePalettes == rhs.pShadingRatePalettes );
#  endif
    }

    bool operator!=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native VkPipelineViewportShadingRateImageStateCreateInfoNV layout.
    StructureType                sType                  = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;
    const void *                 pNext                  = {};
    Bool32                       shadingRateImageEnable = {};
    uint32_t                     viewportCount          = {};
    const ShadingRatePaletteNV * pShadingRatePalettes   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization mapping the native C type to its C++ wrapper type.
  template <>
  struct CppType<VkPipelineViewportShadingRateImageStateCreateInfoNV>
  {
    using Type = PipelineViewportShadingRateImageStateCreateInfoNV;
  };
#endif

  // Trait specialization mapping the StructureType enumerant to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV>
  {
    using Type = PipelineViewportShadingRateImageStateCreateInfoNV;
  };

  // wrapper struct for struct VkViewportSwizzleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkViewportSwizzleNV.html
  // NOTE: layout-compatible with VkViewportSwizzleNV — the conversion operators below
  // reinterpret_cast between the two, so the member order and types must exactly mirror the C struct.
  struct ViewportSwizzleNV
  {
    using NativeType = VkViewportSwizzleNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; every coordinate defaults to ePositiveX.
    VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( ViewportCoordinateSwizzleNV x_ = ViewportCoordinateSwizzleNV::ePositiveX,
                                            ViewportCoordinateSwizzleNV y_ = ViewportCoordinateSwizzleNV::ePositiveX,
                                            ViewportCoordinateSwizzleNV z_ = ViewportCoordinateSwizzleNV::ePositiveX,
                                            ViewportCoordinateSwizzleNV w_ = ViewportCoordinateSwizzleNV::ePositiveX ) VULKAN_HPP_NOEXCEPT
      : x{ x_ }
      , y{ y_ }
      , z{ z_ }
      , w{ w_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; safe because the two types share an identical layout.
    ViewportSwizzleNV( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT : ViewportSwizzleNV( *reinterpret_cast<ViewportSwizzleNV const *>( &rhs ) ) {}

    ViewportSwizzleNV & operator=( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper view.
    ViewportSwizzleNV & operator=( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ViewportSwizzleNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setX( ViewportCoordinateSwizzleNV x_ ) VULKAN_HPP_NOEXCEPT
    {
      x = x_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setY( ViewportCoordinateSwizzleNV y_ ) VULKAN_HPP_NOEXCEPT
    {
      y = y_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setZ( ViewportCoordinateSwizzleNV z_ ) VULKAN_HPP_NOEXCEPT
    {
      z = z_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setW( ViewportCoordinateSwizzleNV w_ ) VULKAN_HPP_NOEXCEPT
    {
      w = w_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const),
    // so a wrapper instance can be passed directly to the C API.
    operator VkViewportSwizzleNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkViewportSwizzleNV *>( this );
    }

    operator VkViewportSwizzleNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkViewportSwizzleNV *>( this );
    }

    operator VkViewportSwizzleNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkViewportSwizzleNV *>( this );
    }

    operator VkViewportSwizzleNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkViewportSwizzleNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, used below for tuple-wise comparison.
    std::
      tuple<ViewportCoordinateSwizzleNV const &, ViewportCoordinateSwizzleNV const &, ViewportCoordinateSwizzleNV const &, ViewportCoordinateSwizzleNV const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( x, y, z, w );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ViewportSwizzleNV const & ) const = default;
#else
    // Pre-C++20 fallback: member-wise equality (via reflect() when reflection is enabled).
    bool operator==( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z ) && ( w == rhs.w );
#  endif
    }

    bool operator!=( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member layout mirrors VkViewportSwizzleNV; do not reorder.
    ViewportCoordinateSwizzleNV x = ViewportCoordinateSwizzleNV::ePositiveX;
    ViewportCoordinateSwizzleNV y = ViewportCoordinateSwizzleNV::ePositiveX;
    ViewportCoordinateSwizzleNV z = ViewportCoordinateSwizzleNV::ePositiveX;
    ViewportCoordinateSwizzleNV w = ViewportCoordinateSwizzleNV::ePositiveX;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization mapping the native C type to its C++ wrapper type.
  template <>
  struct CppType<VkViewportSwizzleNV>
  {
    using Type = ViewportSwizzleNV;
  };
#endif

  // wrapper struct for struct VkPipelineViewportSwizzleStateCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportSwizzleStateCreateInfoNV.html
  // NOTE: layout-compatible with the native C struct — the conversion operators below
  // reinterpret_cast between the two, so the member order and types must exactly mirror it.
  struct PipelineViewportSwizzleStateCreateInfoNV
  {
    using NativeType = VkPipelineViewportSwizzleStateCreateInfoNV;

    // allowDuplicate / structureType are consumed by the structure-chain machinery elsewhere;
    // allowDuplicate presumably controls whether this struct may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( PipelineViewportSwizzleStateCreateFlagsNV flags_             = {},
                                                                   uint32_t                                  viewportCount_     = {},
                                                                   const ViewportSwizzleNV *                 pViewportSwizzles_ = {},
                                                                   const void *                              pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , viewportCount{ viewportCount_ }
      , pViewportSwizzles{ pViewportSwizzles_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; safe because the two types share an identical layout.
    PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineViewportSwizzleStateCreateInfoNV( *reinterpret_cast<PipelineViewportSwizzleStateCreateInfoNV const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives viewportCount/pViewportSwizzles from the array proxy.
    PipelineViewportSwizzleStateCreateInfoNV( PipelineViewportSwizzleStateCreateFlagsNV                flags_,
                                              ArrayProxyNoTemporaries<const ViewportSwizzleNV> const & viewportSwizzles_,
                                              const void *                                             pNext_ = nullptr )
      : pNext( pNext_ ), flags( flags_ ), viewportCount( static_cast<uint32_t>( viewportSwizzles_.size() ) ), pViewportSwizzles( viewportSwizzles_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineViewportSwizzleStateCreateInfoNV & operator=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper view.
    PipelineViewportSwizzleStateCreateInfoNV & operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineViewportSwizzleStateCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setFlags( PipelineViewportSwizzleStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
    {
      viewportCount = viewportCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setPViewportSwizzles( const ViewportSwizzleNV * pViewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
    {
      pViewportSwizzles = pViewportSwizzles_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets both the count and the pointer from an array proxy in one call.
    PipelineViewportSwizzleStateCreateInfoNV &
      setViewportSwizzles( ArrayProxyNoTemporaries<const ViewportSwizzleNV> const & viewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
    {
      viewportCount     = static_cast<uint32_t>( viewportSwizzles_.size() );
      pViewportSwizzles = viewportSwizzles_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const),
    // so a wrapper instance can be passed directly to the C API.
    operator VkPipelineViewportSwizzleStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineViewportSwizzleStateCreateInfoNV *>( this );
    }

    operator VkPipelineViewportSwizzleStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineViewportSwizzleStateCreateInfoNV *>( this );
    }

    operator VkPipelineViewportSwizzleStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineViewportSwizzleStateCreateInfoNV *>( this );
    }

    operator VkPipelineViewportSwizzleStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineViewportSwizzleStateCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, used below for tuple-wise comparison.
    std::
      tuple<StructureType const &, const void * const &, PipelineViewportSwizzleStateCreateFlagsNV const &, uint32_t const &, const ViewportSwizzleNV * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, viewportCount, pViewportSwizzles );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineViewportSwizzleStateCreateInfoNV const & ) const = default;
#else
    // Pre-C++20 fallback: member-wise equality; note pNext/pViewportSwizzles compare by pointer,
    // not by pointed-to contents.
    bool operator==( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( viewportCount == rhs.viewportCount ) &&
             ( pViewportSwizzles == rhs.pViewportSwizzles );
#  endif
    }

    bool operator!=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member layout mirrors the native C struct; do not reorder.
    StructureType                             sType             = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
    const void *                              pNext             = {};
    PipelineViewportSwizzleStateCreateFlagsNV flags             = {};
    uint32_t                                  viewportCount     = {};
    const ViewportSwizzleNV *                 pViewportSwizzles = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization mapping the native C type to its C++ wrapper type.
  template <>
  struct CppType<VkPipelineViewportSwizzleStateCreateInfoNV>
  {
    using Type = PipelineViewportSwizzleStateCreateInfoNV;
  };
#endif

  // Trait specialization mapping the StructureType enumerant to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePipelineViewportSwizzleStateCreateInfoNV>
  {
    using Type = PipelineViewportSwizzleStateCreateInfoNV;
  };

  // wrapper struct for struct VkViewportWScalingNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkViewportWScalingNV.html
  // NOTE: layout-compatible with VkViewportWScalingNV — the conversion operators below
  // reinterpret_cast between the two, so the member order and types must exactly mirror the C struct.
  struct ViewportWScalingNV
  {
    using NativeType = VkViewportWScalingNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; both coefficients default to zero.
    VULKAN_HPP_CONSTEXPR ViewportWScalingNV( float xcoeff_ = {}, float ycoeff_ = {} ) VULKAN_HPP_NOEXCEPT
      : xcoeff{ xcoeff_ }
      , ycoeff{ ycoeff_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ViewportWScalingNV( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; safe because the two types share an identical layout.
    ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT : ViewportWScalingNV( *reinterpret_cast<ViewportWScalingNV const *>( &rhs ) ) {}

    ViewportWScalingNV & operator=( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper view.
    ViewportWScalingNV & operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ViewportWScalingNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV & setXcoeff( float xcoeff_ ) VULKAN_HPP_NOEXCEPT
    {
      xcoeff = xcoeff_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV & setYcoeff( float ycoeff_ ) VULKAN_HPP_NOEXCEPT
    {
      ycoeff = ycoeff_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const),
    // so a wrapper instance can be passed directly to the C API.
    operator VkViewportWScalingNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkViewportWScalingNV *>( this );
    }

    operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkViewportWScalingNV *>( this );
    }

    operator VkViewportWScalingNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkViewportWScalingNV *>( this );
    }

    operator VkViewportWScalingNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkViewportWScalingNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, used below for tuple-wise comparison.
    std::tuple<float const &, float const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( xcoeff, ycoeff );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ViewportWScalingNV const & ) const = default;
#else
    // Pre-C++20 fallback: member-wise equality. Floats are compared with ==, i.e. exact
    // bit-for-bit value equality — intentional for a plain struct-field comparison.
    bool operator==( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( xcoeff == rhs.xcoeff ) && ( ycoeff == rhs.ycoeff );
#  endif
    }

    bool operator!=( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member layout mirrors VkViewportWScalingNV; do not reorder.
    float xcoeff = {};
    float ycoeff = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization mapping the native C type to its C++ wrapper type.
  template <>
  struct CppType<VkViewportWScalingNV>
  {
    using Type = ViewportWScalingNV;
  };
#endif

  // wrapper struct for struct VkPipelineViewportWScalingStateCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportWScalingStateCreateInfoNV.html
  // NOTE: layout-compatible with the native C struct — the conversion operators below
  // reinterpret_cast between the two, so the member order and types must exactly mirror it.
  struct PipelineViewportWScalingStateCreateInfoNV
  {
    using NativeType = VkPipelineViewportWScalingStateCreateInfoNV;

    // allowDuplicate / structureType are consumed by the structure-chain machinery elsewhere;
    // allowDuplicate presumably controls whether this struct may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePipelineViewportWScalingStateCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( Bool32                     viewportWScalingEnable_ = {},
                                                                    uint32_t                   viewportCount_          = {},
                                                                    const ViewportWScalingNV * pViewportWScalings_     = {},
                                                                    const void *               pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , viewportWScalingEnable{ viewportWScalingEnable_ }
      , viewportCount{ viewportCount_ }
      , pViewportWScalings{ pViewportWScalings_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; safe because the two types share an identical layout.
    PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineViewportWScalingStateCreateInfoNV( *reinterpret_cast<PipelineViewportWScalingStateCreateInfoNV const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives viewportCount/pViewportWScalings from the array proxy.
    PipelineViewportWScalingStateCreateInfoNV( Bool32                                                    viewportWScalingEnable_,
                                               ArrayProxyNoTemporaries<const ViewportWScalingNV> const & viewportWScalings_,
                                               const void *                                              pNext_ = nullptr )
      : pNext( pNext_ )
      , viewportWScalingEnable( viewportWScalingEnable_ )
      , viewportCount( static_cast<uint32_t>( viewportWScalings_.size() ) )
      , pViewportWScalings( viewportWScalings_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineViewportWScalingStateCreateInfoNV & operator=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper view.
    PipelineViewportWScalingStateCreateInfoNV & operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineViewportWScalingStateCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setViewportWScalingEnable( Bool32 viewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      viewportWScalingEnable = viewportWScalingEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
    {
      viewportCount = viewportCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV &
      setPViewportWScalings( const ViewportWScalingNV * pViewportWScalings_ ) VULKAN_HPP_NOEXCEPT
    {
      pViewportWScalings = pViewportWScalings_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets both the count and the pointer from an array proxy in one call.
    PipelineViewportWScalingStateCreateInfoNV &
      setViewportWScalings( ArrayProxyNoTemporaries<const ViewportWScalingNV> const & viewportWScalings_ ) VULKAN_HPP_NOEXCEPT
    {
      viewportCount      = static_cast<uint32_t>( viewportWScalings_.size() );
      pViewportWScalings = viewportWScalings_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const),
    // so a wrapper instance can be passed directly to the C API.
    operator VkPipelineViewportWScalingStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineViewportWScalingStateCreateInfoNV *>( this );
    }

    operator VkPipelineViewportWScalingStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineViewportWScalingStateCreateInfoNV *>( this );
    }

    operator VkPipelineViewportWScalingStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPipelineViewportWScalingStateCreateInfoNV *>( this );
    }

    operator VkPipelineViewportWScalingStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPipelineViewportWScalingStateCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, used below for tuple-wise comparison.
    std::tuple<StructureType const &, const void * const &, Bool32 const &, uint32_t const &, const ViewportWScalingNV * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, viewportWScalingEnable, viewportCount, pViewportWScalings );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineViewportWScalingStateCreateInfoNV const & ) const = default;
#else
    // Pre-C++20 fallback: member-wise equality; note pNext/pViewportWScalings compare by pointer,
    // not by pointed-to contents.
    bool operator==( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewportWScalingEnable == rhs.viewportWScalingEnable ) &&
             ( viewportCount == rhs.viewportCount ) && ( pViewportWScalings == rhs.pViewportWScalings );
#  endif
    }

    bool operator!=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member layout mirrors the native C struct; do not reorder.
    StructureType              sType                  = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
    const void *               pNext                  = {};
    Bool32                     viewportWScalingEnable = {};
    uint32_t                   viewportCount          = {};
    const ViewportWScalingNV * pViewportWScalings     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization mapping the native C type to its C++ wrapper type.
  template <>
  struct CppType<VkPipelineViewportWScalingStateCreateInfoNV>
  {
    using Type = PipelineViewportWScalingStateCreateInfoNV;
  };
#endif

  // Trait specialization mapping the StructureType enumerant to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePipelineViewportWScalingStateCreateInfoNV>
  {
    using Type = PipelineViewportWScalingStateCreateInfoNV;
  };

#if defined( VK_USE_PLATFORM_GGP )
  // wrapper struct for struct VkPresentFrameTokenGGP, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentFrameTokenGGP.html
  // NOTE: layout-compatible with the native C struct — the conversion operators below
  // reinterpret_cast between the two, so the member order and types must exactly mirror it.
  // GgpFrameToken is a platform type without comparison operators, so the comparisons here
  // are hand-written around memcmp instead of being member-wise / defaulted.
  struct PresentFrameTokenGGP
  {
    using NativeType = VkPresentFrameTokenGGP;

    // allowDuplicate / structureType are consumed by the structure-chain machinery elsewhere;
    // allowDuplicate presumably controls whether this struct may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePresentFrameTokenGGP;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( GgpFrameToken frameToken_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , frameToken{ frameToken_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; safe because the two types share an identical layout.
    PresentFrameTokenGGP( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
      : PresentFrameTokenGGP( *reinterpret_cast<PresentFrameTokenGGP const *>( &rhs ) )
    {
    }

    PresentFrameTokenGGP & operator=( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper view.
    PresentFrameTokenGGP & operator=( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PresentFrameTokenGGP const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PresentFrameTokenGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentFrameTokenGGP & setFrameToken( GgpFrameToken frameToken_ ) VULKAN_HPP_NOEXCEPT
    {
      frameToken = frameToken_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const),
    // so a wrapper instance can be passed directly to the C API.
    operator VkPresentFrameTokenGGP const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPresentFrameTokenGGP *>( this );
    }

    operator VkPresentFrameTokenGGP &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPresentFrameTokenGGP *>( this );
    }

    operator VkPresentFrameTokenGGP const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPresentFrameTokenGGP *>( this );
    }

    operator VkPresentFrameTokenGGP *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPresentFrameTokenGGP *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, for reflection-based tooling.
    std::tuple<StructureType const &, const void * const &, GgpFrameToken const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, frameToken );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written three-way comparison (cannot be defaulted: frameToken is an opaque
    // platform type); the token is ordered bytewise via memcmp.
    std::strong_ordering operator<=>( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#  endif

    // Equality: sType/pNext compare directly; frameToken compares bytewise via memcmp.
    bool operator==( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ) == 0 );
    }

    bool operator!=( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Member layout mirrors the native C struct; do not reorder.
    StructureType sType      = StructureType::ePresentFrameTokenGGP;
    const void *  pNext      = {};
    GgpFrameToken frameToken = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization mapping the native C type to its C++ wrapper type.
  template <>
  struct CppType<VkPresentFrameTokenGGP>
  {
    using Type = PresentFrameTokenGGP;
  };
#  endif

  // Trait specialization mapping the StructureType enumerant to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePresentFrameTokenGGP>
  {
    using Type = PresentFrameTokenGGP;
  };
#endif /*VK_USE_PLATFORM_GGP*/

  // wrapper struct for struct VkPresentId2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentId2KHR.html
  // NOTE: layout-compatible with the native C struct — the conversion operators below
  // reinterpret_cast between the two, so the member order and types must exactly mirror it.
  struct PresentId2KHR
  {
    using NativeType = VkPresentId2KHR;

    // allowDuplicate / structureType are consumed by the structure-chain machinery elsewhere;
    // allowDuplicate presumably controls whether this struct may appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePresentId2KHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR PresentId2KHR( uint32_t swapchainCount_ = {}, const uint64_t * pPresentIds_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , swapchainCount{ swapchainCount_ }
      , pPresentIds{ pPresentIds_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PresentId2KHR( PresentId2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; safe because the two types share an identical layout.
    PresentId2KHR( VkPresentId2KHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentId2KHR( *reinterpret_cast<PresentId2KHR const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives swapchainCount/pPresentIds from the array proxy.
    PresentId2KHR( ArrayProxyNoTemporaries<const uint64_t> const & presentIds_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( presentIds_.size() ) ), pPresentIds( presentIds_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PresentId2KHR & operator=( PresentId2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper view.
    PresentId2KHR & operator=( VkPresentId2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PresentId2KHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 PresentId2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentId2KHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = swapchainCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentId2KHR & setPPresentIds( const uint64_t * pPresentIds_ ) VULKAN_HPP_NOEXCEPT
    {
      pPresentIds = pPresentIds_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets both the count and the pointer from an array proxy in one call.
    PresentId2KHR & setPresentIds( ArrayProxyNoTemporaries<const uint64_t> const & presentIds_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = static_cast<uint32_t>( presentIds_.size() );
      pPresentIds    = presentIds_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer, const and non-const),
    // so a wrapper instance can be passed directly to the C API.
    operator VkPresentId2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPresentId2KHR *>( this );
    }

    operator VkPresentId2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPresentId2KHR *>( this );
    }

    operator VkPresentId2KHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPresentId2KHR *>( this );
    }

    operator VkPresentId2KHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPresentId2KHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, used below for tuple-wise comparison.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const uint64_t * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, swapchainCount, pPresentIds );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PresentId2KHR const & ) const = default;
#else
    // Pre-C++20 fallback: member-wise equality; note pNext/pPresentIds compare by pointer,
    // not by pointed-to contents.
    bool operator==( PresentId2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pPresentIds == rhs.pPresentIds );
#  endif
    }

    bool operator!=( PresentId2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member layout mirrors the native C struct; do not reorder.
    StructureType    sType          = StructureType::ePresentId2KHR;
    const void *     pNext          = {};
    uint32_t         swapchainCount = {};
    const uint64_t * pPresentIds    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait specialization mapping the native C type to its C++ wrapper type.
  template <>
  struct CppType<VkPresentId2KHR>
  {
    using Type = PresentId2KHR;
  };
#endif

  // Trait specialization mapping the StructureType enumerant to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::ePresentId2KHR>
  {
    using Type = PresentId2KHR;
  };

  // wrapper struct for struct VkPresentIdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentIdKHR.html
  // Layout-compatible with VkPresentIdKHR; all conversions below rely on that via reinterpret_cast.
  struct PresentIdKHR
  {
    using NativeType = VkPresentIdKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePresentIdKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PresentIdKHR( uint32_t swapchainCount_ = {}, const uint64_t * pPresentIds_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , swapchainCount{ swapchainCount_ }
      , pPresentIds{ pPresentIds_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PresentIdKHR( PresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (member-wise copy via the layout-compatible wrapper).
    PresentIdKHR( VkPresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentIdKHR( *reinterpret_cast<PresentIdKHR const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: swapchainCount is derived from the proxy's size.
    PresentIdKHR( ArrayProxyNoTemporaries<const uint64_t> const & presentIds_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( presentIds_.size() ) ), pPresentIds( presentIds_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PresentIdKHR & operator=( PresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PresentIdKHR & operator=( VkPresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PresentIdKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = swapchainCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setPPresentIds( const uint64_t * pPresentIds_ ) VULKAN_HPP_NOEXCEPT
    {
      pPresentIds = pPresentIds_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both swapchainCount and pPresentIds from a single array proxy.
    PresentIdKHR & setPresentIds( ArrayProxyNoTemporaries<const uint64_t> const & presentIds_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = static_cast<uint32_t>( presentIds_.size() );
      pPresentIds    = presentIds_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C type (reference and pointer, const and non-const).
    operator VkPresentIdKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPresentIdKHR *>( this );
    }

    operator VkPresentIdKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPresentIdKHR *>( this );
    }

    operator VkPresentIdKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPresentIdKHR *>( this );
    }

    operator VkPresentIdKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPresentIdKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: a tuple of references to all members, used e.g. by operator== below.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const uint64_t * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, swapchainCount, pPresentIds );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PresentIdKHR const & ) const = default;
#else
    // Shallow comparison: pPresentIds is compared by pointer value, not by pointed-to contents.
    bool operator==( PresentIdKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pPresentIds == rhs.pPresentIds );
#  endif
    }

    bool operator!=( PresentIdKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType    sType          = StructureType::ePresentIdKHR;
    const void *     pNext          = {};
    uint32_t         swapchainCount = {};
    const uint64_t * pPresentIds    = {};
  };

// Map the native C type (and the StructureType enumerant) back to the C++ wrapper type.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPresentIdKHR>
  {
    using Type = PresentIdKHR;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePresentIdKHR>
  {
    using Type = PresentIdKHR;
  };

  // wrapper struct for struct VkPresentInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentInfoKHR.html
  // Layout-compatible with VkPresentInfoKHR; all conversions below rely on that via reinterpret_cast.
  struct PresentInfoKHR
  {
    using NativeType = VkPresentInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePresentInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PresentInfoKHR( uint32_t             waitSemaphoreCount_ = {},
                                         const Semaphore *    pWaitSemaphores_    = {},
                                         uint32_t             swapchainCount_     = {},
                                         const SwapchainKHR * pSwapchains_        = {},
                                         const uint32_t *     pImageIndices_      = {},
                                         Result *             pResults_           = {},
                                         const void *         pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , waitSemaphoreCount{ waitSemaphoreCount_ }
      , pWaitSemaphores{ pWaitSemaphores_ }
      , swapchainCount{ swapchainCount_ }
      , pSwapchains{ pSwapchains_ }
      , pImageIndices{ pImageIndices_ }
      , pResults{ pResults_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PresentInfoKHR( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (member-wise copy via the layout-compatible wrapper).
    PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentInfoKHR( *reinterpret_cast<PresentInfoKHR const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: counts are derived from the proxies. swapchains_, imageIndices_
    // and (if non-empty) results_ must all have the same size; this is checked below, either by
    // assertion (no-exceptions builds) or by throwing LogicError.
    PresentInfoKHR( ArrayProxyNoTemporaries<const Semaphore> const &    waitSemaphores_,
                    ArrayProxyNoTemporaries<const SwapchainKHR> const & swapchains_   = {},
                    ArrayProxyNoTemporaries<const uint32_t> const &     imageIndices_ = {},
                    ArrayProxyNoTemporaries<Result> const &             results_      = {},
                    const void *                                        pNext_        = nullptr )
      : pNext( pNext_ )
      , waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) )
      , pWaitSemaphores( waitSemaphores_.data() )
      , swapchainCount( static_cast<uint32_t>( swapchains_.size() ) )
      , pSwapchains( swapchains_.data() )
      , pImageIndices( imageIndices_.data() )
      , pResults( results_.data() )
    {
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( swapchains_.size() == imageIndices_.size() );
      VULKAN_HPP_ASSERT( results_.empty() || ( swapchains_.size() == results_.size() ) );
      VULKAN_HPP_ASSERT( results_.empty() || ( imageIndices_.size() == results_.size() ) );
#    else
      if ( swapchains_.size() != imageIndices_.size() )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: swapchains_.size() != imageIndices_.size()" );
      }
      if ( !results_.empty() && ( swapchains_.size() != results_.size() ) )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( swapchains_.size() != results_.size() )" );
      }
      if ( !results_.empty() && ( imageIndices_.size() != results_.size() ) )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( imageIndices_.size() != results_.size() )" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PresentInfoKHR & operator=( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PresentInfoKHR & operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PresentInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreCount = waitSemaphoreCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPWaitSemaphores( const Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
    {
      pWaitSemaphores = pWaitSemaphores_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PresentInfoKHR & setWaitSemaphores( ArrayProxyNoTemporaries<const Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
      pWaitSemaphores    = waitSemaphores_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = swapchainCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPSwapchains( const SwapchainKHR * pSwapchains_ ) VULKAN_HPP_NOEXCEPT
    {
      pSwapchains = pSwapchains_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // NOTE: setSwapchains, setImageIndices and setResults each overwrite swapchainCount with the
    // size of their own proxy, so the sizes must agree (the last call wins on swapchainCount).
    PresentInfoKHR & setSwapchains( ArrayProxyNoTemporaries<const SwapchainKHR> const & swapchains_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = static_cast<uint32_t>( swapchains_.size() );
      pSwapchains    = swapchains_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPImageIndices( const uint32_t * pImageIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pImageIndices = pImageIndices_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PresentInfoKHR & setImageIndices( ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = static_cast<uint32_t>( imageIndices_.size() );
      pImageIndices  = imageIndices_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPResults( Result * pResults_ ) VULKAN_HPP_NOEXCEPT
    {
      pResults = pResults_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PresentInfoKHR & setResults( ArrayProxyNoTemporaries<Result> const & results_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = static_cast<uint32_t>( results_.size() );
      pResults       = results_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C type (reference and pointer, const and non-const).
    operator VkPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPresentInfoKHR *>( this );
    }

    operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPresentInfoKHR *>( this );
    }

    operator VkPresentInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPresentInfoKHR *>( this );
    }

    operator VkPresentInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPresentInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: a tuple of references to all members, used e.g. by operator== below.
    std::tuple<StructureType const &,
               const void * const &,
               uint32_t const &,
               const Semaphore * const &,
               uint32_t const &,
               const SwapchainKHR * const &,
               const uint32_t * const &,
               Result * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, waitSemaphoreCount, pWaitSemaphores, swapchainCount, pSwapchains, pImageIndices, pResults );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PresentInfoKHR const & ) const = default;
#else
    // Shallow comparison: array members are compared by pointer value, not by pointed-to contents.
    bool operator==( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) &&
             ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( swapchainCount == rhs.swapchainCount ) && ( pSwapchains == rhs.pSwapchains ) &&
             ( pImageIndices == rhs.pImageIndices ) && ( pResults == rhs.pResults );
#  endif
    }

    bool operator!=( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType        sType              = StructureType::ePresentInfoKHR;
    const void *         pNext              = {};
    uint32_t             waitSemaphoreCount = {};
    const Semaphore *    pWaitSemaphores    = {};
    uint32_t             swapchainCount     = {};
    const SwapchainKHR * pSwapchains        = {};
    const uint32_t *     pImageIndices      = {};
    Result *             pResults           = {};
  };

// Map the native C type (and the StructureType enumerant) back to the C++ wrapper type.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPresentInfoKHR>
  {
    using Type = PresentInfoKHR;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePresentInfoKHR>
  {
    using Type = PresentInfoKHR;
  };

  // wrapper struct for struct VkRectLayerKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRectLayerKHR.html
  // Layout-compatible with VkRectLayerKHR; all conversions below rely on that via reinterpret_cast.
  struct RectLayerKHR
  {
    using NativeType = VkRectLayerKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR RectLayerKHR( Offset2D offset_ = {}, Extent2D extent_ = {}, uint32_t layer_ = {} ) VULKAN_HPP_NOEXCEPT
      : offset{ offset_ }
      , extent{ extent_ }
      , layer{ layer_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RectLayerKHR( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (member-wise copy via the layout-compatible wrapper).
    RectLayerKHR( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT : RectLayerKHR( *reinterpret_cast<RectLayerKHR const *>( &rhs ) ) {}

    // Convenience constructor: take offset/extent from a Rect2D, with a separately supplied layer.
    explicit RectLayerKHR( Rect2D const & rect2D, uint32_t layer_ = {} ) : offset( rect2D.offset ), extent( rect2D.extent ), layer( layer_ ) {}

    RectLayerKHR & operator=( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    RectLayerKHR & operator=( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RectLayerKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setOffset( Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setExtent( Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT
    {
      extent = extent_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setLayer( uint32_t layer_ ) VULKAN_HPP_NOEXCEPT
    {
      layer = layer_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C type (reference and pointer, const and non-const).
    operator VkRectLayerKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRectLayerKHR *>( this );
    }

    operator VkRectLayerKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRectLayerKHR *>( this );
    }

    operator VkRectLayerKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRectLayerKHR *>( this );
    }

    operator VkRectLayerKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRectLayerKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: a tuple of references to all members, used e.g. by operator== below.
    std::tuple<Offset2D const &, Extent2D const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( offset, extent, layer );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RectLayerKHR const & ) const = default;
#else
    bool operator==( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( offset == rhs.offset ) && ( extent == rhs.extent ) && ( layer == rhs.layer );
#  endif
    }

    bool operator!=( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    Offset2D offset = {};
    Extent2D extent = {};
    uint32_t layer  = {};
  };

// Map the native C type back to the C++ wrapper type (no sType, so no StructureType mapping).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkRectLayerKHR>
  {
    using Type = RectLayerKHR;
  };
#endif

  // wrapper struct for struct VkPresentRegionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentRegionKHR.html
  // Layout-compatible with VkPresentRegionKHR; all conversions below rely on that via reinterpret_cast.
  struct PresentRegionKHR
  {
    using NativeType = VkPresentRegionKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PresentRegionKHR( uint32_t rectangleCount_ = {}, const RectLayerKHR * pRectangles_ = {} ) VULKAN_HPP_NOEXCEPT
      : rectangleCount{ rectangleCount_ }
      , pRectangles{ pRectangles_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PresentRegionKHR( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (member-wise copy via the layout-compatible wrapper).
    PresentRegionKHR( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentRegionKHR( *reinterpret_cast<PresentRegionKHR const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: rectangleCount is derived from the proxy's size.
    PresentRegionKHR( ArrayProxyNoTemporaries<const RectLayerKHR> const & rectangles_ )
      : rectangleCount( static_cast<uint32_t>( rectangles_.size() ) ), pRectangles( rectangles_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PresentRegionKHR & operator=( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PresentRegionKHR & operator=( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PresentRegionKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR & setRectangleCount( uint32_t rectangleCount_ ) VULKAN_HPP_NOEXCEPT
    {
      rectangleCount = rectangleCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR & setPRectangles( const RectLayerKHR * pRectangles_ ) VULKAN_HPP_NOEXCEPT
    {
      pRectangles = pRectangles_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both rectangleCount and pRectangles from a single array proxy.
    PresentRegionKHR & setRectangles( ArrayProxyNoTemporaries<const RectLayerKHR> const & rectangles_ ) VULKAN_HPP_NOEXCEPT
    {
      rectangleCount = static_cast<uint32_t>( rectangles_.size() );
      pRectangles    = rectangles_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C type (reference and pointer, const and non-const).
    operator VkPresentRegionKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPresentRegionKHR *>( this );
    }

    operator VkPresentRegionKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPresentRegionKHR *>( this );
    }

    operator VkPresentRegionKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPresentRegionKHR *>( this );
    }

    operator VkPresentRegionKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPresentRegionKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: a tuple of references to all members, used e.g. by operator== below.
    std::tuple<uint32_t const &, const RectLayerKHR * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( rectangleCount, pRectangles );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PresentRegionKHR const & ) const = default;
#else
    // Shallow comparison: pRectangles is compared by pointer value, not by pointed-to contents.
    bool operator==( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( rectangleCount == rhs.rectangleCount ) && ( pRectangles == rhs.pRectangles );
#  endif
    }

    bool operator!=( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t             rectangleCount = {};
    const RectLayerKHR * pRectangles    = {};
  };

// Map the native C type back to the C++ wrapper type (no sType, so no StructureType mapping).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPresentRegionKHR>
  {
    using Type = PresentRegionKHR;
  };
#endif

  // wrapper struct for struct VkPresentRegionsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentRegionsKHR.html
  // Layout-compatible with VkPresentRegionsKHR; all conversions below rely on that via reinterpret_cast.
  struct PresentRegionsKHR
  {
    using NativeType = VkPresentRegionsKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePresentRegionsKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PresentRegionsKHR( uint32_t swapchainCount_ = {}, const PresentRegionKHR * pRegions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , swapchainCount{ swapchainCount_ }
      , pRegions{ pRegions_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PresentRegionsKHR( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (member-wise copy via the layout-compatible wrapper).
    PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentRegionsKHR( *reinterpret_cast<PresentRegionsKHR const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: swapchainCount is derived from the proxy's size.
    PresentRegionsKHR( ArrayProxyNoTemporaries<const PresentRegionKHR> const & regions_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PresentRegionsKHR & operator=( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PresentRegionsKHR & operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PresentRegionsKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = swapchainCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setPRegions( const PresentRegionKHR * pRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      pRegions = pRegions_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both swapchainCount and pRegions from a single array proxy.
    PresentRegionsKHR & setRegions( ArrayProxyNoTemporaries<const PresentRegionKHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = static_cast<uint32_t>( regions_.size() );
      pRegions       = regions_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C type (reference and pointer, const and non-const).
    operator VkPresentRegionsKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPresentRegionsKHR *>( this );
    }

    operator VkPresentRegionsKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPresentRegionsKHR *>( this );
    }

    operator VkPresentRegionsKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPresentRegionsKHR *>( this );
    }

    operator VkPresentRegionsKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPresentRegionsKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: a tuple of references to all members, used e.g. by operator== below.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const PresentRegionKHR * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, swapchainCount, pRegions );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PresentRegionsKHR const & ) const = default;
#else
    // Shallow comparison: pRegions is compared by pointer value, not by pointed-to contents.
    bool operator==( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pRegions == rhs.pRegions );
#  endif
    }

    bool operator!=( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType            sType          = StructureType::ePresentRegionsKHR;
    const void *             pNext          = {};
    uint32_t                 swapchainCount = {};
    const PresentRegionKHR * pRegions       = {};
  };

// Map the native C type (and the StructureType enumerant) back to the C++ wrapper type.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPresentRegionsKHR>
  {
    using Type = PresentRegionsKHR;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePresentRegionsKHR>
  {
    using Type = PresentRegionsKHR;
  };

  // wrapper struct for struct VkPresentTimeGOOGLE, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentTimeGOOGLE.html
  // Layout-compatible with VkPresentTimeGOOGLE; all conversions below rely on that via reinterpret_cast.
  struct PresentTimeGOOGLE
  {
    using NativeType = VkPresentTimeGOOGLE;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( uint32_t presentID_ = {}, uint64_t desiredPresentTime_ = {} ) VULKAN_HPP_NOEXCEPT
      : presentID{ presentID_ }
      , desiredPresentTime{ desiredPresentTime_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (member-wise copy via the layout-compatible wrapper).
    PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT : PresentTimeGOOGLE( *reinterpret_cast<PresentTimeGOOGLE const *>( &rhs ) ) {}

    PresentTimeGOOGLE & operator=( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PresentTimeGOOGLE & operator=( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PresentTimeGOOGLE const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 PresentTimeGOOGLE & setPresentID( uint32_t presentID_ ) VULKAN_HPP_NOEXCEPT
    {
      presentID = presentID_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentTimeGOOGLE & setDesiredPresentTime( uint64_t desiredPresentTime_ ) VULKAN_HPP_NOEXCEPT
    {
      desiredPresentTime = desiredPresentTime_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C type (reference and pointer, const and non-const).
    operator VkPresentTimeGOOGLE const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPresentTimeGOOGLE *>( this );
    }

    operator VkPresentTimeGOOGLE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPresentTimeGOOGLE *>( this );
    }

    operator VkPresentTimeGOOGLE const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPresentTimeGOOGLE *>( this );
    }

    operator VkPresentTimeGOOGLE *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPresentTimeGOOGLE *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: a tuple of references to all members, used e.g. by operator== below.
    std::tuple<uint32_t const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( presentID, desiredPresentTime );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PresentTimeGOOGLE const & ) const = default;
#else
    bool operator==( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( presentID == rhs.presentID ) && ( desiredPresentTime == rhs.desiredPresentTime );
#  endif
    }

    bool operator!=( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t presentID          = {};
    uint64_t desiredPresentTime = {};
  };

// Map the native C type back to the C++ wrapper type (no sType, so no StructureType mapping).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPresentTimeGOOGLE>
  {
    using Type = PresentTimeGOOGLE;
  };
#endif

  // wrapper struct for struct VkPresentTimesInfoGOOGLE, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentTimesInfoGOOGLE.html
  // Layout-compatible with VkPresentTimesInfoGOOGLE; all conversions below rely on that via reinterpret_cast.
  struct PresentTimesInfoGOOGLE
  {
    using NativeType = VkPresentTimesInfoGOOGLE;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePresentTimesInfoGOOGLE;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = {}, const PresentTimeGOOGLE * pTimes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , swapchainCount{ swapchainCount_ }
      , pTimes{ pTimes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (member-wise copy via the layout-compatible wrapper).
    PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
      : PresentTimesInfoGOOGLE( *reinterpret_cast<PresentTimesInfoGOOGLE const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: swapchainCount is derived from the proxy's size.
    PresentTimesInfoGOOGLE( ArrayProxyNoTemporaries<const PresentTimeGOOGLE> const & times_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( times_.size() ) ), pTimes( times_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PresentTimesInfoGOOGLE & operator=( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PresentTimesInfoGOOGLE & operator=( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PresentTimesInfoGOOGLE const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = swapchainCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setPTimes( const PresentTimeGOOGLE * pTimes_ ) VULKAN_HPP_NOEXCEPT
    {
      pTimes = pTimes_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both swapchainCount and pTimes from a single array proxy.
    PresentTimesInfoGOOGLE & setTimes( ArrayProxyNoTemporaries<const PresentTimeGOOGLE> const & times_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = static_cast<uint32_t>( times_.size() );
      pTimes         = times_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C type (reference and pointer, const and non-const).
    operator VkPresentTimesInfoGOOGLE const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPresentTimesInfoGOOGLE *>( this );
    }

    operator VkPresentTimesInfoGOOGLE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPresentTimesInfoGOOGLE *>( this );
    }

    operator VkPresentTimesInfoGOOGLE const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPresentTimesInfoGOOGLE *>( this );
    }

    operator VkPresentTimesInfoGOOGLE *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPresentTimesInfoGOOGLE *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: a tuple of references to all members, used e.g. by operator== below.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const PresentTimeGOOGLE * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, swapchainCount, pTimes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PresentTimesInfoGOOGLE const & ) const = default;
#else
    // Shallow comparison: pTimes is compared by pointer value, not by pointed-to contents.
    bool operator==( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pTimes == rhs.pTimes );
#  endif
    }

    bool operator!=( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType             sType          = StructureType::ePresentTimesInfoGOOGLE;
    const void *              pNext          = {};
    uint32_t                  swapchainCount = {};
    const PresentTimeGOOGLE * pTimes         = {};
  };

// Map the native C type (and the StructureType enumerant) back to the C++ wrapper type.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPresentTimesInfoGOOGLE>
  {
    using Type = PresentTimesInfoGOOGLE;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::ePresentTimesInfoGOOGLE>
  {
    using Type = PresentTimesInfoGOOGLE;
  };

  // wrapper struct for struct VkPresentTimingInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentTimingInfoEXT.html
  struct PresentTimingInfoEXT
  {
    using NativeType = VkPresentTimingInfoEXT;

    // compile-time metadata: the StructureType value this struct's sType is fixed to;
    // allowDuplicate states whether this struct may appear more than once in a pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePresentTimingInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; sType keeps its default member initializer and is never user-settable
    VULKAN_HPP_CONSTEXPR PresentTimingInfoEXT( PresentTimingInfoFlagsEXT flags_                        = {},
                                               uint64_t                  targetTime_                   = {},
                                               uint64_t                  timeDomainId_                 = {},
                                               PresentStageFlagsEXT      presentStageQueries_          = {},
                                               PresentStageFlagsEXT      targetTimeDomainPresentStage_ = {},
                                               const void *              pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , targetTime{ targetTime_ }
      , timeDomainId{ timeDomainId_ }
      , presentStageQueries{ presentStageQueries_ }
      , targetTimeDomainPresentStage{ targetTimeDomainPresentStage_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PresentTimingInfoEXT( PresentTimingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; relies on this wrapper being layout-compatible with
    // VkPresentTimingInfoEXT (the same assumption the reinterpret_cast conversion operators below make)
    PresentTimingInfoEXT( VkPresentTimingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PresentTimingInfoEXT( *reinterpret_cast<PresentTimingInfoEXT const *>( &rhs ) )
    {
    }

    PresentTimingInfoEXT & operator=( PresentTimingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (same layout-compatibility assumption as above)
    PresentTimingInfoEXT & operator=( VkPresentTimingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PresentTimingInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 PresentTimingInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentTimingInfoEXT & setFlags( PresentTimingInfoFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentTimingInfoEXT & setTargetTime( uint64_t targetTime_ ) VULKAN_HPP_NOEXCEPT
    {
      targetTime = targetTime_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentTimingInfoEXT & setTimeDomainId( uint64_t timeDomainId_ ) VULKAN_HPP_NOEXCEPT
    {
      timeDomainId = timeDomainId_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentTimingInfoEXT & setPresentStageQueries( PresentStageFlagsEXT presentStageQueries_ ) VULKAN_HPP_NOEXCEPT
    {
      presentStageQueries = presentStageQueries_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentTimingInfoEXT & setTargetTimeDomainPresentStage( PresentStageFlagsEXT targetTimeDomainPresentStage_ ) VULKAN_HPP_NOEXCEPT
    {
      targetTimeDomainPresentStage = targetTimeDomainPresentStage_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native C type, by reference and by pointer
    operator VkPresentTimingInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPresentTimingInfoEXT *>( this );
    }

    operator VkPresentTimingInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPresentTimingInfoEXT *>( this );
    }

    operator VkPresentTimingInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPresentTimingInfoEXT *>( this );
    }

    operator VkPresentTimingInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPresentTimingInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members, in declaration order; backs the reflection-based comparison below
    std::tuple<StructureType const &,
               const void * const &,
               PresentTimingInfoFlagsEXT const &,
               uint64_t const &,
               uint64_t const &,
               PresentStageFlagsEXT const &,
               PresentStageFlagsEXT const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, targetTime, timeDomainId, presentStageQueries, targetTimeDomainPresentStage );
    }
#endif

    // member-wise (in)equality; note that pNext is compared as a raw pointer, not deeply
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PresentTimingInfoEXT const & ) const = default;
#else
    bool operator==( PresentTimingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( targetTime == rhs.targetTime ) &&
             ( timeDomainId == rhs.timeDomainId ) && ( presentStageQueries == rhs.presentStageQueries ) &&
             ( targetTimeDomainPresentStage == rhs.targetTimeDomainPresentStage );
#  endif
    }

    bool operator!=( PresentTimingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members mirror VkPresentTimingInfoEXT; order and types must not change, since the
    // reinterpret_casts above depend on identical layout
    StructureType             sType                        = StructureType::ePresentTimingInfoEXT;
    const void *              pNext                        = {};
    PresentTimingInfoFlagsEXT flags                        = {};
    uint64_t                  targetTime                   = {};
    uint64_t                  timeDomainId                 = {};
    PresentStageFlagsEXT      presentStageQueries          = {};
    PresentStageFlagsEXT      targetTimeDomainPresentStage = {};
  };

  // maps the native C type to its C++ wrapper type (only available from C++20 on)
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPresentTimingInfoEXT>
  {
    using Type = PresentTimingInfoEXT;
  };
#endif

  // maps the StructureType enumerant back to the C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::ePresentTimingInfoEXT>
  {
    using Type = PresentTimingInfoEXT;
  };

  // wrapper struct for struct VkPresentTimingSurfaceCapabilitiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentTimingSurfaceCapabilitiesEXT.html
  struct PresentTimingSurfaceCapabilitiesEXT
  {
    using NativeType = VkPresentTimingSurfaceCapabilitiesEXT;

    // compile-time metadata: the StructureType value this struct's sType is fixed to;
    // allowDuplicate states whether this struct may appear more than once in a pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePresentTimingSurfaceCapabilitiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; sType keeps its default member initializer and is never user-settable.
    // NOTE(review): pNext is a non-const void * here, matching the C struct — presumably this struct is
    // filled in by the implementation as a query result; confirm against the extension spec.
    VULKAN_HPP_CONSTEXPR PresentTimingSurfaceCapabilitiesEXT( Bool32               presentTimingSupported_         = {},
                                                              Bool32               presentAtAbsoluteTimeSupported_ = {},
                                                              Bool32               presentAtRelativeTimeSupported_ = {},
                                                              PresentStageFlagsEXT presentStageQueries_            = {},
                                                              void *               pNext_                          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , presentTimingSupported{ presentTimingSupported_ }
      , presentAtAbsoluteTimeSupported{ presentAtAbsoluteTimeSupported_ }
      , presentAtRelativeTimeSupported{ presentAtRelativeTimeSupported_ }
      , presentStageQueries{ presentStageQueries_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PresentTimingSurfaceCapabilitiesEXT( PresentTimingSurfaceCapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; relies on this wrapper being layout-compatible with
    // VkPresentTimingSurfaceCapabilitiesEXT (same assumption as the reinterpret_cast conversions below)
    PresentTimingSurfaceCapabilitiesEXT( VkPresentTimingSurfaceCapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PresentTimingSurfaceCapabilitiesEXT( *reinterpret_cast<PresentTimingSurfaceCapabilitiesEXT const *>( &rhs ) )
    {
    }

    PresentTimingSurfaceCapabilitiesEXT & operator=( PresentTimingSurfaceCapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (same layout-compatibility assumption as above)
    PresentTimingSurfaceCapabilitiesEXT & operator=( VkPresentTimingSurfaceCapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PresentTimingSurfaceCapabilitiesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 PresentTimingSurfaceCapabilitiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentTimingSurfaceCapabilitiesEXT & setPresentTimingSupported( Bool32 presentTimingSupported_ ) VULKAN_HPP_NOEXCEPT
    {
      presentTimingSupported = presentTimingSupported_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentTimingSurfaceCapabilitiesEXT &
      setPresentAtAbsoluteTimeSupported( Bool32 presentAtAbsoluteTimeSupported_ ) VULKAN_HPP_NOEXCEPT
    {
      presentAtAbsoluteTimeSupported = presentAtAbsoluteTimeSupported_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentTimingSurfaceCapabilitiesEXT &
      setPresentAtRelativeTimeSupported( Bool32 presentAtRelativeTimeSupported_ ) VULKAN_HPP_NOEXCEPT
    {
      presentAtRelativeTimeSupported = presentAtRelativeTimeSupported_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentTimingSurfaceCapabilitiesEXT & setPresentStageQueries( PresentStageFlagsEXT presentStageQueries_ ) VULKAN_HPP_NOEXCEPT
    {
      presentStageQueries = presentStageQueries_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native C type, by reference and by pointer
    operator VkPresentTimingSurfaceCapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPresentTimingSurfaceCapabilitiesEXT *>( this );
    }

    operator VkPresentTimingSurfaceCapabilitiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPresentTimingSurfaceCapabilitiesEXT *>( this );
    }

    operator VkPresentTimingSurfaceCapabilitiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPresentTimingSurfaceCapabilitiesEXT *>( this );
    }

    operator VkPresentTimingSurfaceCapabilitiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPresentTimingSurfaceCapabilitiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members, in declaration order; backs the reflection-based comparison below
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &, PresentStageFlagsEXT const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, presentTimingSupported, presentAtAbsoluteTimeSupported, presentAtRelativeTimeSupported, presentStageQueries );
    }
#endif

    // member-wise (in)equality; note that pNext is compared as a raw pointer, not deeply
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PresentTimingSurfaceCapabilitiesEXT const & ) const = default;
#else
    bool operator==( PresentTimingSurfaceCapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentTimingSupported == rhs.presentTimingSupported ) &&
             ( presentAtAbsoluteTimeSupported == rhs.presentAtAbsoluteTimeSupported ) &&
             ( presentAtRelativeTimeSupported == rhs.presentAtRelativeTimeSupported ) && ( presentStageQueries == rhs.presentStageQueries );
#  endif
    }

    bool operator!=( PresentTimingSurfaceCapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members mirror VkPresentTimingSurfaceCapabilitiesEXT; order and types must not change, since the
    // reinterpret_casts above depend on identical layout
    StructureType        sType                          = StructureType::ePresentTimingSurfaceCapabilitiesEXT;
    void *               pNext                          = {};
    Bool32               presentTimingSupported         = {};
    Bool32               presentAtAbsoluteTimeSupported = {};
    Bool32               presentAtRelativeTimeSupported = {};
    PresentStageFlagsEXT presentStageQueries            = {};
  };

  // maps the native C type to its C++ wrapper type (only available from C++20 on)
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPresentTimingSurfaceCapabilitiesEXT>
  {
    using Type = PresentTimingSurfaceCapabilitiesEXT;
  };
#endif

  // maps the StructureType enumerant back to the C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::ePresentTimingSurfaceCapabilitiesEXT>
  {
    using Type = PresentTimingSurfaceCapabilitiesEXT;
  };

  // wrapper struct for struct VkPresentTimingsInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentTimingsInfoEXT.html
  struct PresentTimingsInfoEXT
  {
    using NativeType = VkPresentTimingsInfoEXT;

    // compile-time metadata: the StructureType value this struct's sType is fixed to;
    // allowDuplicate states whether this struct may appear more than once in a pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePresentTimingsInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; sType keeps its default member initializer and is never user-settable
    VULKAN_HPP_CONSTEXPR
      PresentTimingsInfoEXT( uint32_t swapchainCount_ = {}, const PresentTimingInfoEXT * pTimingInfos_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , swapchainCount{ swapchainCount_ }
      , pTimingInfos{ pTimingInfos_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PresentTimingsInfoEXT( PresentTimingsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; relies on this wrapper being layout-compatible with
    // VkPresentTimingsInfoEXT (same assumption as the reinterpret_cast conversions below)
    PresentTimingsInfoEXT( VkPresentTimingsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PresentTimingsInfoEXT( *reinterpret_cast<PresentTimingsInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // enhanced-mode constructor: derives swapchainCount and pTimingInfos from one array proxy,
    // keeping count and pointer consistent by construction (proxy data must outlive this struct)
    PresentTimingsInfoEXT( ArrayProxyNoTemporaries<const PresentTimingInfoEXT> const & timingInfos_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( timingInfos_.size() ) ), pTimingInfos( timingInfos_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PresentTimingsInfoEXT & operator=( PresentTimingsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (same layout-compatibility assumption as above)
    PresentTimingsInfoEXT & operator=( VkPresentTimingsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PresentTimingsInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 PresentTimingsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentTimingsInfoEXT & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = swapchainCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentTimingsInfoEXT & setPTimingInfos( const PresentTimingInfoEXT * pTimingInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      pTimingInfos = pTimingInfos_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // enhanced-mode setter: sets count and pointer together from one array proxy
    PresentTimingsInfoEXT & setTimingInfos( ArrayProxyNoTemporaries<const PresentTimingInfoEXT> const & timingInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = static_cast<uint32_t>( timingInfos_.size() );
      pTimingInfos   = timingInfos_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native C type, by reference and by pointer
    operator VkPresentTimingsInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPresentTimingsInfoEXT *>( this );
    }

    operator VkPresentTimingsInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPresentTimingsInfoEXT *>( this );
    }

    operator VkPresentTimingsInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPresentTimingsInfoEXT *>( this );
    }

    operator VkPresentTimingsInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPresentTimingsInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members, in declaration order; backs the reflection-based comparison below
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const PresentTimingInfoEXT * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, swapchainCount, pTimingInfos );
    }
#endif

    // member-wise (in)equality; pNext and pTimingInfos are compared as raw pointers, not deeply
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PresentTimingsInfoEXT const & ) const = default;
#else
    bool operator==( PresentTimingsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pTimingInfos == rhs.pTimingInfos );
#  endif
    }

    bool operator!=( PresentTimingsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members mirror VkPresentTimingsInfoEXT; order and types must not change, since the
    // reinterpret_casts above depend on identical layout
    StructureType                sType          = StructureType::ePresentTimingsInfoEXT;
    const void *                 pNext          = {};
    uint32_t                     swapchainCount = {};
    const PresentTimingInfoEXT * pTimingInfos   = {};
  };

  // maps the native C type to its C++ wrapper type (only available from C++20 on)
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPresentTimingsInfoEXT>
  {
    using Type = PresentTimingsInfoEXT;
  };
#endif

  // maps the StructureType enumerant back to the C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::ePresentTimingsInfoEXT>
  {
    using Type = PresentTimingsInfoEXT;
  };

  // wrapper struct for struct VkPresentWait2InfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentWait2InfoKHR.html
  struct PresentWait2InfoKHR
  {
    using NativeType = VkPresentWait2InfoKHR;

    // compile-time metadata: the StructureType value this struct's sType is fixed to;
    // allowDuplicate states whether this struct may appear more than once in a pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePresentWait2InfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; sType keeps its default member initializer and is never user-settable
    VULKAN_HPP_CONSTEXPR PresentWait2InfoKHR( uint64_t presentId_ = {}, uint64_t timeout_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , presentId{ presentId_ }
      , timeout{ timeout_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PresentWait2InfoKHR( PresentWait2InfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; relies on this wrapper being layout-compatible with
    // VkPresentWait2InfoKHR (same assumption as the reinterpret_cast conversions below)
    PresentWait2InfoKHR( VkPresentWait2InfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentWait2InfoKHR( *reinterpret_cast<PresentWait2InfoKHR const *>( &rhs ) )
    {
    }

    PresentWait2InfoKHR & operator=( PresentWait2InfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (same layout-compatibility assumption as above)
    PresentWait2InfoKHR & operator=( VkPresentWait2InfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PresentWait2InfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 PresentWait2InfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentWait2InfoKHR & setPresentId( uint64_t presentId_ ) VULKAN_HPP_NOEXCEPT
    {
      presentId = presentId_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PresentWait2InfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT
    {
      timeout = timeout_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native C type, by reference and by pointer
    operator VkPresentWait2InfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPresentWait2InfoKHR *>( this );
    }

    operator VkPresentWait2InfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPresentWait2InfoKHR *>( this );
    }

    operator VkPresentWait2InfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPresentWait2InfoKHR *>( this );
    }

    operator VkPresentWait2InfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPresentWait2InfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members, in declaration order; backs the reflection-based comparison below
    std::tuple<StructureType const &, const void * const &, uint64_t const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, presentId, timeout );
    }
#endif

    // member-wise (in)equality; note that pNext is compared as a raw pointer, not deeply
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PresentWait2InfoKHR const & ) const = default;
#else
    bool operator==( PresentWait2InfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentId == rhs.presentId ) && ( timeout == rhs.timeout );
#  endif
    }

    bool operator!=( PresentWait2InfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members mirror VkPresentWait2InfoKHR; order and types must not change, since the
    // reinterpret_casts above depend on identical layout
    StructureType sType     = StructureType::ePresentWait2InfoKHR;
    const void *  pNext     = {};
    uint64_t      presentId = {};
    uint64_t      timeout   = {};
  };

  // maps the native C type to its C++ wrapper type (only available from C++20 on)
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPresentWait2InfoKHR>
  {
    using Type = PresentWait2InfoKHR;
  };
#endif

  // maps the StructureType enumerant back to the C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::ePresentWait2InfoKHR>
  {
    using Type = PresentWait2InfoKHR;
  };

  // wrapper struct for struct VkPrivateDataSlotCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPrivateDataSlotCreateInfo.html
  struct PrivateDataSlotCreateInfo
  {
    using NativeType = VkPrivateDataSlotCreateInfo;

    // compile-time metadata: the StructureType value this struct's sType is fixed to;
    // allowDuplicate states whether this struct may appear more than once in a pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePrivateDataSlotCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; sType keeps its default member initializer and is never user-settable
    VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfo( PrivateDataSlotCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfo( PrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; relies on this wrapper being layout-compatible with
    // VkPrivateDataSlotCreateInfo (same assumption as the reinterpret_cast conversions below)
    PrivateDataSlotCreateInfo( VkPrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PrivateDataSlotCreateInfo( *reinterpret_cast<PrivateDataSlotCreateInfo const *>( &rhs ) )
    {
    }

    PrivateDataSlotCreateInfo & operator=( PrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (same layout-compatibility assumption as above)
    PrivateDataSlotCreateInfo & operator=( VkPrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PrivateDataSlotCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfo & setFlags( PrivateDataSlotCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native C type, by reference and by pointer
    operator VkPrivateDataSlotCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( this );
    }

    operator VkPrivateDataSlotCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPrivateDataSlotCreateInfo *>( this );
    }

    operator VkPrivateDataSlotCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( this );
    }

    operator VkPrivateDataSlotCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPrivateDataSlotCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members, in declaration order; backs the reflection-based comparison below
    std::tuple<StructureType const &, const void * const &, PrivateDataSlotCreateFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags );
    }
#endif

    // member-wise (in)equality; note that pNext is compared as a raw pointer, not deeply
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PrivateDataSlotCreateInfo const & ) const = default;
#else
    bool operator==( PrivateDataSlotCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
#  endif
    }

    bool operator!=( PrivateDataSlotCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members mirror VkPrivateDataSlotCreateInfo; order and types must not change, since the
    // reinterpret_casts above depend on identical layout
    StructureType              sType = StructureType::ePrivateDataSlotCreateInfo;
    const void *               pNext = {};
    PrivateDataSlotCreateFlags flags = {};
  };

  // maps the native C type to its C++ wrapper type (only available from C++20 on)
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkPrivateDataSlotCreateInfo>
  {
    using Type = PrivateDataSlotCreateInfo;
  };
#endif

  // maps the StructureType enumerant back to the C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::ePrivateDataSlotCreateInfo>
  {
    using Type = PrivateDataSlotCreateInfo;
  };

  // backward-compatibility alias for the former EXT-suffixed name of this struct
  using PrivateDataSlotCreateInfoEXT = PrivateDataSlotCreateInfo;

  // wrapper struct for struct VkProtectedSubmitInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkProtectedSubmitInfo.html
  struct ProtectedSubmitInfo
  {
    using NativeType = VkProtectedSubmitInfo;

    // compile-time metadata: the StructureType value this struct's sType is fixed to;
    // allowDuplicate states whether this struct may appear more than once in a pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eProtectedSubmitInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; sType keeps its default member initializer and is never user-settable
    VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( Bool32 protectedSubmit_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , protectedSubmit{ protectedSubmit_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct; relies on this wrapper being layout-compatible with
    // VkProtectedSubmitInfo (same assumption as the reinterpret_cast conversions below)
    ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ProtectedSubmitInfo( *reinterpret_cast<ProtectedSubmitInfo const *>( &rhs ) )
    {
    }

    ProtectedSubmitInfo & operator=( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct (same layout-compatibility assumption as above)
    ProtectedSubmitInfo & operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ProtectedSubmitInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters: each returns *this so calls can be chained
    VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo & setProtectedSubmit( Bool32 protectedSubmit_ ) VULKAN_HPP_NOEXCEPT
    {
      protectedSubmit = protectedSubmit_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native C type, by reference and by pointer
    operator VkProtectedSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkProtectedSubmitInfo *>( this );
    }

    operator VkProtectedSubmitInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkProtectedSubmitInfo *>( this );
    }

    operator VkProtectedSubmitInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkProtectedSubmitInfo *>( this );
    }

    operator VkProtectedSubmitInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkProtectedSubmitInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members, in declaration order; backs the reflection-based comparison below
    std::tuple<StructureType const &, const void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, protectedSubmit );
    }
#endif

    // member-wise (in)equality; note that pNext is compared as a raw pointer, not deeply
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ProtectedSubmitInfo const & ) const = default;
#else
    bool operator==( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedSubmit == rhs.protectedSubmit );
#  endif
    }

    bool operator!=( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members mirror VkProtectedSubmitInfo; order and types must not change, since the
    // reinterpret_casts above depend on identical layout
    StructureType sType           = StructureType::eProtectedSubmitInfo;
    const void *  pNext           = {};
    Bool32        protectedSubmit = {};
  };

  // maps the native C type to its C++ wrapper type (only available from C++20 on)
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkProtectedSubmitInfo>
  {
    using Type = ProtectedSubmitInfo;
  };
#endif

  // maps the StructureType enumerant back to the C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::eProtectedSubmitInfo>
  {
    using Type = ProtectedSubmitInfo;
  };

  // wrapper struct for struct VkPushConstantsInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPushConstantsInfo.html
  struct PushConstantsInfo
  {
    using NativeType = VkPushConstantsInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePushConstantsInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PushConstantsInfo( PipelineLayout   layout_     = {},
                                            ShaderStageFlags stageFlags_ = {},
                                            uint32_t         offset_     = {},
                                            uint32_t         size_       = {},
                                            const void *     pValues_    = {},
                                            const void *     pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , layout{ layout_ }
      , stageFlags{ stageFlags_ }
      , offset{ offset_ }
      , size{ size_ }
      , pValues{ pValues_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PushConstantsInfo( PushConstantsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PushConstantsInfo( VkPushConstantsInfo const & rhs ) VULKAN_HPP_NOEXCEPT : PushConstantsInfo( *reinterpret_cast<PushConstantsInfo const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    template <typename T>
    PushConstantsInfo(
      PipelineLayout layout_, ShaderStageFlags stageFlags_, uint32_t offset_, ArrayProxyNoTemporaries<const T> const & values_, const void * pNext_ = nullptr )
      : pNext( pNext_ )
      , layout( layout_ )
      , stageFlags( stageFlags_ )
      , offset( offset_ )
      , size( static_cast<uint32_t>( values_.size() * sizeof( T ) ) )
      , pValues( values_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PushConstantsInfo & operator=( PushConstantsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PushConstantsInfo & operator=( VkPushConstantsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PushConstantsInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setLayout( PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
    {
      layout = layout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setStageFlags( ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      stageFlags = stageFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setPValues( const void * pValues_ ) VULKAN_HPP_NOEXCEPT
    {
      pValues = pValues_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    template <typename T>
    PushConstantsInfo & setValues( ArrayProxyNoTemporaries<const T> const & values_ ) VULKAN_HPP_NOEXCEPT
    {
      size    = static_cast<uint32_t>( values_.size() * sizeof( T ) );
      pValues = values_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    operator VkPushConstantsInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPushConstantsInfo *>( this );
    }

    operator VkPushConstantsInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPushConstantsInfo *>( this );
    }

    operator VkPushConstantsInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPushConstantsInfo *>( this );
    }

    operator VkPushConstantsInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPushConstantsInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &,
               const void * const &,
               PipelineLayout const &,
               ShaderStageFlags const &,
               uint32_t const &,
               uint32_t const &,
               const void * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, layout, stageFlags, offset, size, pValues );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PushConstantsInfo const & ) const = default;
#else
    bool operator==( PushConstantsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( layout == rhs.layout ) && ( stageFlags == rhs.stageFlags ) && ( offset == rhs.offset ) &&
             ( size == rhs.size ) && ( pValues == rhs.pValues );
#  endif
    }

    bool operator!=( PushConstantsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType    sType      = StructureType::ePushConstantsInfo;
    const void *     pNext      = {};
    PipelineLayout   layout     = {};
    ShaderStageFlags stageFlags = {};
    uint32_t         offset     = {};
    uint32_t         size       = {};
    const void *     pValues    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 only: compile-time mapping from the native C type to its C++ wrapper.
  template <>
  struct CppType<VkPushConstantsInfo>
  {
    using Type = PushConstantsInfo;
  };
#endif

  // Compile-time mapping from the StructureType enum value to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePushConstantsInfo>
  {
    using Type = PushConstantsInfo;
  };

  // Alias for code written against the extension-suffixed (KHR) name.
  using PushConstantsInfoKHR = PushConstantsInfo;

  // wrapper struct for struct VkWriteDescriptorSet, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkWriteDescriptorSet.html
  struct WriteDescriptorSet
  {
    using NativeType = VkWriteDescriptorSet;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eWriteDescriptorSet;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all members default to zero/null except descriptorType.
    VULKAN_HPP_CONSTEXPR WriteDescriptorSet( DescriptorSet                dstSet_           = {},
                                             uint32_t                     dstBinding_       = {},
                                             uint32_t                     dstArrayElement_  = {},
                                             uint32_t                     descriptorCount_  = {},
                                             DescriptorType               descriptorType_   = DescriptorType::eSampler,
                                             const DescriptorImageInfo *  pImageInfo_       = {},
                                             const DescriptorBufferInfo * pBufferInfo_      = {},
                                             const BufferView *           pTexelBufferView_ = {},
                                             const void *                 pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , dstSet{ dstSet_ }
      , dstBinding{ dstBinding_ }
      , dstArrayElement{ dstArrayElement_ }
      , descriptorCount{ descriptorCount_ }
      , descriptorType{ descriptorType_ }
      , pImageInfo{ pImageInfo_ }
      , pBufferInfo{ pBufferInfo_ }
      , pTexelBufferView{ pTexelBufferView_ }
    {
    }

    VULKAN_HPP_CONSTEXPR WriteDescriptorSet( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper sharing the C struct's layout.
    WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT : WriteDescriptorSet( *reinterpret_cast<WriteDescriptorSet const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: descriptorCount is deduced from whichever of the three
    // arrays is non-empty. At most one of imageInfo_, bufferInfo_, texelBufferView_ may be
    // non-empty; this is asserted (or a LogicError is thrown) in the body below.
    WriteDescriptorSet( DescriptorSet                                               dstSet_,
                        uint32_t                                                    dstBinding_,
                        uint32_t                                                    dstArrayElement_,
                        DescriptorType                                              descriptorType_,
                        ArrayProxyNoTemporaries<const DescriptorImageInfo> const &  imageInfo_,
                        ArrayProxyNoTemporaries<const DescriptorBufferInfo> const & bufferInfo_      = {},
                        ArrayProxyNoTemporaries<const BufferView> const &           texelBufferView_ = {},
                        const void *                                                pNext_           = nullptr )
      : pNext( pNext_ )
      , dstSet( dstSet_ )
      , dstBinding( dstBinding_ )
      , dstArrayElement( dstArrayElement_ )
      , descriptorCount( static_cast<uint32_t>( !imageInfo_.empty()    ? imageInfo_.size()
                                                : !bufferInfo_.empty() ? bufferInfo_.size()
                                                                       : texelBufferView_.size() ) )
      , descriptorType( descriptorType_ )
      , pImageInfo( imageInfo_.data() )
      , pBufferInfo( bufferInfo_.data() )
      , pTexelBufferView( texelBufferView_.data() )
    {
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      // Without exceptions, the "at most one non-empty array" precondition is only asserted.
      VULKAN_HPP_ASSERT( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) <= 1 );
#    else
      if ( 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                          "::WriteDescriptorSet::WriteDescriptorSet: 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() )" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    WriteDescriptorSet & operator=( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; relies on identical layout of wrapper and C struct.
    WriteDescriptorSet & operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<WriteDescriptorSet const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable member setters.
    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstSet( DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
    {
      dstSet = dstSet_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
    {
      dstBinding = dstBinding_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
    {
      dstArrayElement = dstArrayElement_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorCount = descriptorCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDescriptorType( DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorType = descriptorType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPImageInfo( const DescriptorImageInfo * pImageInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pImageInfo = pImageInfo_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets pImageInfo AND descriptorCount from the given array in one call.
    WriteDescriptorSet & setImageInfo( ArrayProxyNoTemporaries<const DescriptorImageInfo> const & imageInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorCount = static_cast<uint32_t>( imageInfo_.size() );
      pImageInfo      = imageInfo_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPBufferInfo( const DescriptorBufferInfo * pBufferInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pBufferInfo = pBufferInfo_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets pBufferInfo AND descriptorCount from the given array in one call.
    WriteDescriptorSet & setBufferInfo( ArrayProxyNoTemporaries<const DescriptorBufferInfo> const & bufferInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorCount = static_cast<uint32_t>( bufferInfo_.size() );
      pBufferInfo     = bufferInfo_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPTexelBufferView( const BufferView * pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT
    {
      pTexelBufferView = pTexelBufferView_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets pTexelBufferView AND descriptorCount from the given array in one call.
    WriteDescriptorSet & setTexelBufferView( ArrayProxyNoTemporaries<const BufferView> const & texelBufferView_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorCount  = static_cast<uint32_t>( texelBufferView_.size() );
      pTexelBufferView = texelBufferView_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the native C struct; rely on identical layout of wrapper and C struct.
    operator VkWriteDescriptorSet const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkWriteDescriptorSet *>( this );
    }

    operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkWriteDescriptorSet *>( this );
    }

    operator VkWriteDescriptorSet const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkWriteDescriptorSet *>( this );
    }

    operator VkWriteDescriptorSet *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkWriteDescriptorSet *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, used below for comparison.
    std::tuple<StructureType const &,
               const void * const &,
               DescriptorSet const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               DescriptorType const &,
               const DescriptorImageInfo * const &,
               const DescriptorBufferInfo * const &,
               const BufferView * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, dstSet, dstBinding, dstArrayElement, descriptorCount, descriptorType, pImageInfo, pBufferInfo, pTexelBufferView );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( WriteDescriptorSet const & ) const = default;
#else
    // Member-wise equality; note that pointer members are compared by address, not contents.
    bool operator==( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dstSet == rhs.dstSet ) && ( dstBinding == rhs.dstBinding ) &&
             ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount ) && ( descriptorType == rhs.descriptorType ) &&
             ( pImageInfo == rhs.pImageInfo ) && ( pBufferInfo == rhs.pBufferInfo ) && ( pTexelBufferView == rhs.pTexelBufferView );
#  endif
    }

    bool operator!=( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkWriteDescriptorSet; do not reorder.
    StructureType                sType            = StructureType::eWriteDescriptorSet;
    const void *                 pNext            = {};
    DescriptorSet                dstSet           = {};
    uint32_t                     dstBinding       = {};
    uint32_t                     dstArrayElement  = {};
    uint32_t                     descriptorCount  = {};
    DescriptorType               descriptorType   = DescriptorType::eSampler;
    const DescriptorImageInfo *  pImageInfo       = {};
    const DescriptorBufferInfo * pBufferInfo      = {};
    const BufferView *           pTexelBufferView = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 only: compile-time mapping from the native C type to its C++ wrapper.
  template <>
  struct CppType<VkWriteDescriptorSet>
  {
    using Type = WriteDescriptorSet;
  };
#endif

  // Compile-time mapping from the StructureType enum value to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eWriteDescriptorSet>
  {
    using Type = WriteDescriptorSet;
  };

  // wrapper struct for struct VkPushDescriptorSetInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPushDescriptorSetInfo.html
  struct PushDescriptorSetInfo
  {
    using NativeType = VkPushDescriptorSetInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePushDescriptorSetInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all members default to zero/null.
    VULKAN_HPP_CONSTEXPR PushDescriptorSetInfo( ShaderStageFlags           stageFlags_           = {},
                                                PipelineLayout             layout_               = {},
                                                uint32_t                   set_                  = {},
                                                uint32_t                   descriptorWriteCount_ = {},
                                                const WriteDescriptorSet * pDescriptorWrites_    = {},
                                                const void *               pNext_                = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stageFlags{ stageFlags_ }
      , layout{ layout_ }
      , set{ set_ }
      , descriptorWriteCount{ descriptorWriteCount_ }
      , pDescriptorWrites{ pDescriptorWrites_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PushDescriptorSetInfo( PushDescriptorSetInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper sharing the C struct's layout.
    PushDescriptorSetInfo( VkPushDescriptorSetInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PushDescriptorSetInfo( *reinterpret_cast<PushDescriptorSetInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: descriptorWriteCount and pDescriptorWrites are both
    // derived from the descriptorWrites_ array proxy.
    PushDescriptorSetInfo( ShaderStageFlags                                          stageFlags_,
                           PipelineLayout                                            layout_,
                           uint32_t                                                  set_,
                           ArrayProxyNoTemporaries<const WriteDescriptorSet> const & descriptorWrites_,
                           const void *                                              pNext_ = nullptr )
      : pNext( pNext_ )
      , stageFlags( stageFlags_ )
      , layout( layout_ )
      , set( set_ )
      , descriptorWriteCount( static_cast<uint32_t>( descriptorWrites_.size() ) )
      , pDescriptorWrites( descriptorWrites_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PushDescriptorSetInfo & operator=( PushDescriptorSetInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; relies on identical layout of wrapper and C struct.
    PushDescriptorSetInfo & operator=( VkPushDescriptorSetInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PushDescriptorSetInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable member setters.
    VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setStageFlags( ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      stageFlags = stageFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setLayout( PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
    {
      layout = layout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT
    {
      set = set_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setDescriptorWriteCount( uint32_t descriptorWriteCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorWriteCount = descriptorWriteCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setPDescriptorWrites( const WriteDescriptorSet * pDescriptorWrites_ ) VULKAN_HPP_NOEXCEPT
    {
      pDescriptorWrites = pDescriptorWrites_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets pDescriptorWrites AND descriptorWriteCount from the given array in one call.
    PushDescriptorSetInfo & setDescriptorWrites( ArrayProxyNoTemporaries<const WriteDescriptorSet> const & descriptorWrites_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorWriteCount = static_cast<uint32_t>( descriptorWrites_.size() );
      pDescriptorWrites    = descriptorWrites_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the native C struct; rely on identical layout of wrapper and C struct.
    operator VkPushDescriptorSetInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPushDescriptorSetInfo *>( this );
    }

    operator VkPushDescriptorSetInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPushDescriptorSetInfo *>( this );
    }

    operator VkPushDescriptorSetInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPushDescriptorSetInfo *>( this );
    }

    operator VkPushDescriptorSetInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPushDescriptorSetInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, used below for comparison.
    std::tuple<StructureType const &,
               const void * const &,
               ShaderStageFlags const &,
               PipelineLayout const &,
               uint32_t const &,
               uint32_t const &,
               const WriteDescriptorSet * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stageFlags, layout, set, descriptorWriteCount, pDescriptorWrites );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PushDescriptorSetInfo const & ) const = default;
#else
    // Member-wise equality; note that pDescriptorWrites is compared by address, not contents.
    bool operator==( PushDescriptorSetInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageFlags == rhs.stageFlags ) && ( layout == rhs.layout ) && ( set == rhs.set ) &&
             ( descriptorWriteCount == rhs.descriptorWriteCount ) && ( pDescriptorWrites == rhs.pDescriptorWrites );
#  endif
    }

    bool operator!=( PushDescriptorSetInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkPushDescriptorSetInfo; do not reorder.
    StructureType              sType                = StructureType::ePushDescriptorSetInfo;
    const void *               pNext                = {};
    ShaderStageFlags           stageFlags           = {};
    PipelineLayout             layout               = {};
    uint32_t                   set                  = {};
    uint32_t                   descriptorWriteCount = {};
    const WriteDescriptorSet * pDescriptorWrites    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 only: compile-time mapping from the native C type to its C++ wrapper.
  template <>
  struct CppType<VkPushDescriptorSetInfo>
  {
    using Type = PushDescriptorSetInfo;
  };
#endif

  // Compile-time mapping from the StructureType enum value to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePushDescriptorSetInfo>
  {
    using Type = PushDescriptorSetInfo;
  };

  // Alias for code written against the extension-suffixed (KHR) name.
  using PushDescriptorSetInfoKHR = PushDescriptorSetInfo;

  // wrapper struct for struct VkPushDescriptorSetWithTemplateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPushDescriptorSetWithTemplateInfo.html
  struct PushDescriptorSetWithTemplateInfo
  {
    using NativeType = VkPushDescriptorSetWithTemplateInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePushDescriptorSetWithTemplateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all members default to zero/null. pData's layout is
    // defined by the descriptor update template, so it is an untyped pointer here.
    VULKAN_HPP_CONSTEXPR PushDescriptorSetWithTemplateInfo( DescriptorUpdateTemplate descriptorUpdateTemplate_ = {},
                                                            PipelineLayout           layout_                   = {},
                                                            uint32_t                 set_                      = {},
                                                            const void *             pData_                    = {},
                                                            const void *             pNext_                    = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , descriptorUpdateTemplate{ descriptorUpdateTemplate_ }
      , layout{ layout_ }
      , set{ set_ }
      , pData{ pData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PushDescriptorSetWithTemplateInfo( PushDescriptorSetWithTemplateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper sharing the C struct's layout.
    PushDescriptorSetWithTemplateInfo( VkPushDescriptorSetWithTemplateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PushDescriptorSetWithTemplateInfo( *reinterpret_cast<PushDescriptorSetWithTemplateInfo const *>( &rhs ) )
    {
    }

    PushDescriptorSetWithTemplateInfo & operator=( PushDescriptorSetWithTemplateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; relies on identical layout of wrapper and C struct.
    PushDescriptorSetWithTemplateInfo & operator=( VkPushDescriptorSetWithTemplateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PushDescriptorSetWithTemplateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable member setters.
    VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfo &
      setDescriptorUpdateTemplate( DescriptorUpdateTemplate descriptorUpdateTemplate_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorUpdateTemplate = descriptorUpdateTemplate_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfo & setLayout( PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
    {
      layout = layout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT
    {
      set = set_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfo & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
    {
      pData = pData_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the native C struct; rely on identical layout of wrapper and C struct.
    operator VkPushDescriptorSetWithTemplateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPushDescriptorSetWithTemplateInfo *>( this );
    }

    operator VkPushDescriptorSetWithTemplateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPushDescriptorSetWithTemplateInfo *>( this );
    }

    operator VkPushDescriptorSetWithTemplateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkPushDescriptorSetWithTemplateInfo *>( this );
    }

    operator VkPushDescriptorSetWithTemplateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkPushDescriptorSetWithTemplateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, used below for comparison.
    std::tuple<StructureType const &, const void * const &, DescriptorUpdateTemplate const &, PipelineLayout const &, uint32_t const &, const void * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, descriptorUpdateTemplate, layout, set, pData );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PushDescriptorSetWithTemplateInfo const & ) const = default;
#else
    // Member-wise equality; note that pData is compared by address, not contents.
    bool operator==( PushDescriptorSetWithTemplateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorUpdateTemplate == rhs.descriptorUpdateTemplate ) && ( layout == rhs.layout ) &&
             ( set == rhs.set ) && ( pData == rhs.pData );
#  endif
    }

    bool operator!=( PushDescriptorSetWithTemplateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkPushDescriptorSetWithTemplateInfo; do not reorder.
    StructureType            sType                    = StructureType::ePushDescriptorSetWithTemplateInfo;
    const void *             pNext                    = {};
    DescriptorUpdateTemplate descriptorUpdateTemplate = {};
    PipelineLayout           layout                   = {};
    uint32_t                 set                      = {};
    const void *             pData                    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 only: compile-time mapping from the native C type to its C++ wrapper.
  template <>
  struct CppType<VkPushDescriptorSetWithTemplateInfo>
  {
    using Type = PushDescriptorSetWithTemplateInfo;
  };
#endif

  // Compile-time mapping from the StructureType enum value to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::ePushDescriptorSetWithTemplateInfo>
  {
    using Type = PushDescriptorSetWithTemplateInfo;
  };

  // Alias for code written against the extension-suffixed (KHR) name.
  using PushDescriptorSetWithTemplateInfoKHR = PushDescriptorSetWithTemplateInfo;

  // wrapper struct for struct VkQueryLowLatencySupportNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryLowLatencySupportNV.html
  struct QueryLowLatencySupportNV
  {
    using NativeType = VkQueryLowLatencySupportNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eQueryLowLatencySupportNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pQueriedLowLatencyData is an untyped, driver-written pointer.
    VULKAN_HPP_CONSTEXPR QueryLowLatencySupportNV( void * pQueriedLowLatencyData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pQueriedLowLatencyData{ pQueriedLowLatencyData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR QueryLowLatencySupportNV( QueryLowLatencySupportNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper sharing the C struct's layout.
    QueryLowLatencySupportNV( VkQueryLowLatencySupportNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : QueryLowLatencySupportNV( *reinterpret_cast<QueryLowLatencySupportNV const *>( &rhs ) )
    {
    }

    QueryLowLatencySupportNV & operator=( QueryLowLatencySupportNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; relies on identical layout of wrapper and C struct.
    QueryLowLatencySupportNV & operator=( VkQueryLowLatencySupportNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<QueryLowLatencySupportNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable member setters.
    VULKAN_HPP_CONSTEXPR_14 QueryLowLatencySupportNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 QueryLowLatencySupportNV & setPQueriedLowLatencyData( void * pQueriedLowLatencyData_ ) VULKAN_HPP_NOEXCEPT
    {
      pQueriedLowLatencyData = pQueriedLowLatencyData_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the native C struct; rely on identical layout of wrapper and C struct.
    operator VkQueryLowLatencySupportNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkQueryLowLatencySupportNV *>( this );
    }

    operator VkQueryLowLatencySupportNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueryLowLatencySupportNV *>( this );
    }

    operator VkQueryLowLatencySupportNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkQueryLowLatencySupportNV *>( this );
    }

    operator VkQueryLowLatencySupportNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkQueryLowLatencySupportNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, used below for comparison.
    std::tuple<StructureType const &, const void * const &, void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pQueriedLowLatencyData );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( QueryLowLatencySupportNV const & ) const = default;
#else
    // Member-wise equality; note that pQueriedLowLatencyData is compared by address, not contents.
    bool operator==( QueryLowLatencySupportNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pQueriedLowLatencyData == rhs.pQueriedLowLatencyData );
#  endif
    }

    bool operator!=( QueryLowLatencySupportNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkQueryLowLatencySupportNV; do not reorder.
    StructureType sType                  = StructureType::eQueryLowLatencySupportNV;
    const void *  pNext                  = {};
    void *        pQueriedLowLatencyData = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 only: compile-time mapping from the native C type to its C++ wrapper.
  template <>
  struct CppType<VkQueryLowLatencySupportNV>
  {
    using Type = QueryLowLatencySupportNV;
  };
#endif

  // Compile-time mapping from the StructureType enum value to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eQueryLowLatencySupportNV>
  {
    using Type = QueryLowLatencySupportNV;
  };

  // wrapper struct for struct VkQueryPoolCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryPoolCreateInfo.html
  struct QueryPoolCreateInfo
  {
    using NativeType = VkQueryPoolCreateInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eQueryPoolCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( QueryPoolCreateFlags        flags_              = {},
                                              QueryType                   queryType_          = QueryType::eOcclusion,
                                              uint32_t                    queryCount_         = {},
                                              QueryPipelineStatisticFlags pipelineStatistics_ = {},
                                              const void *                pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , queryType{ queryType_ }
      , queryCount{ queryCount_ }
      , pipelineStatistics{ pipelineStatistics_ }
    {
    }

    VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : QueryPoolCreateInfo( *reinterpret_cast<QueryPoolCreateInfo const *>( &rhs ) )
    {
    }

    QueryPoolCreateInfo & operator=( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    QueryPoolCreateInfo & operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<QueryPoolCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this for fluent construction.
    VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setFlags( QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setQueryType( QueryType queryType_ ) VULKAN_HPP_NOEXCEPT
    {
      queryType = queryType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      queryCount = queryCount_;
      return *this;
    }

    // Only meaningful when queryType is ePipelineStatistics (see the Vulkan spec for VkQueryPoolCreateInfo).
    VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setPipelineStatistics( QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineStatistics = pipelineStatistics_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C struct (reference and pointer, const and non-const),
    // so this wrapper can be passed directly to C Vulkan entry points. Relies on identical memory layout.
    operator VkQueryPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkQueryPoolCreateInfo *>( this );
    }

    operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueryPoolCreateInfo *>( this );
    }

    operator VkQueryPoolCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkQueryPoolCreateInfo *>( this );
    }

    operator VkQueryPoolCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkQueryPoolCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used by the reflection-based comparison below.
    std::
      tuple<StructureType const &, const void * const &, QueryPoolCreateFlags const &, QueryType const &, uint32_t const &, QueryPipelineStatisticFlags const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, queryType, queryCount, pipelineStatistics );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison covers ==, !=, <, etc. member-wise.
    auto operator<=>( QueryPoolCreateInfo const & ) const = default;
#else
    // Pre-C++20 fallback: member-wise equality (pNext is compared as a raw pointer, not deep-compared).
    bool operator==( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queryType == rhs.queryType ) && ( queryCount == rhs.queryCount ) &&
             ( pipelineStatistics == rhs.pipelineStatistics );
#  endif
    }

    bool operator!=( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkQueryPoolCreateInfo exactly, in declaration order — do not reorder.
    StructureType               sType              = StructureType::eQueryPoolCreateInfo;
    const void *                pNext              = {};
    QueryPoolCreateFlags        flags              = {};
    QueryType                   queryType          = QueryType::eOcclusion;
    uint32_t                    queryCount         = {};
    QueryPipelineStatisticFlags pipelineStatistics = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper for generic code.
  template <>
  struct CppType<VkQueryPoolCreateInfo>
  {
    using Type = QueryPoolCreateInfo;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper for generic code.
  template <>
  struct CppType<StructureType, StructureType::eQueryPoolCreateInfo>
  {
    using Type = QueryPoolCreateInfo;
  };

  // wrapper struct for struct VkQueryPoolPerformanceCreateInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryPoolPerformanceCreateInfoKHR.html
  struct QueryPoolPerformanceCreateInfoKHR
  {
    using NativeType = VkQueryPoolPerformanceCreateInfoKHR;

    // allowDuplicate = false: this structure may appear at most once in a StructureChain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eQueryPoolPerformanceCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( uint32_t         queueFamilyIndex_  = {},
                                                            uint32_t         counterIndexCount_ = {},
                                                            const uint32_t * pCounterIndices_   = {},
                                                            const void *     pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , queueFamilyIndex{ queueFamilyIndex_ }
      , counterIndexCount{ counterIndexCount_ }
      , pCounterIndices{ pCounterIndices_ }
    {
    }

    VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the two types are layout-compatible.
    QueryPoolPerformanceCreateInfoKHR( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : QueryPoolPerformanceCreateInfoKHR( *reinterpret_cast<QueryPoolPerformanceCreateInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode convenience constructor: derives counterIndexCount and pCounterIndices from the
    // array proxy. The proxy must not refer to a temporary — the pointer is stored, not copied.
    QueryPoolPerformanceCreateInfoKHR( uint32_t                                        queueFamilyIndex_,
                                       ArrayProxyNoTemporaries<const uint32_t> const & counterIndices_,
                                       const void *                                    pNext_ = nullptr )
      : pNext( pNext_ )
      , queueFamilyIndex( queueFamilyIndex_ )
      , counterIndexCount( static_cast<uint32_t>( counterIndices_.size() ) )
      , pCounterIndices( counterIndices_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    QueryPoolPerformanceCreateInfoKHR & operator=( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible reinterpret_cast.
    QueryPoolPerformanceCreateInfoKHR & operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<QueryPoolPerformanceCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this for fluent construction.
    VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndex = queueFamilyIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      counterIndexCount = counterIndexCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setPCounterIndices( const uint32_t * pCounterIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pCounterIndices = pCounterIndices_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets count and pointer together from an array proxy (same lifetime caveat as the proxy constructor).
    QueryPoolPerformanceCreateInfoKHR & setCounterIndices( ArrayProxyNoTemporaries<const uint32_t> const & counterIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      counterIndexCount = static_cast<uint32_t>( counterIndices_.size() );
      pCounterIndices   = counterIndices_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C struct so the wrapper can be passed to C Vulkan entry points.
    operator VkQueryPoolPerformanceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( this );
    }

    operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueryPoolPerformanceCreateInfoKHR *>( this );
    }

    operator VkQueryPoolPerformanceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( this );
    }

    operator VkQueryPoolPerformanceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkQueryPoolPerformanceCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used by the reflection-based comparison below.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const uint32_t * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, queueFamilyIndex, counterIndexCount, pCounterIndices );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( QueryPoolPerformanceCreateInfoKHR const & ) const = default;
#else
    // Pre-C++20 fallback: member-wise equality (pCounterIndices compared as a pointer, not element-wise).
    bool operator==( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) &&
             ( counterIndexCount == rhs.counterIndexCount ) && ( pCounterIndices == rhs.pCounterIndices );
#  endif
    }

    bool operator!=( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkQueryPoolPerformanceCreateInfoKHR exactly, in declaration order — do not reorder.
    StructureType    sType             = StructureType::eQueryPoolPerformanceCreateInfoKHR;
    const void *     pNext             = {};
    uint32_t         queueFamilyIndex  = {};
    uint32_t         counterIndexCount = {};
    const uint32_t * pCounterIndices   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper for generic code.
  template <>
  struct CppType<VkQueryPoolPerformanceCreateInfoKHR>
  {
    using Type = QueryPoolPerformanceCreateInfoKHR;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper for generic code.
  template <>
  struct CppType<StructureType, StructureType::eQueryPoolPerformanceCreateInfoKHR>
  {
    using Type = QueryPoolPerformanceCreateInfoKHR;
  };

  // wrapper struct for struct VkQueryPoolPerformanceQueryCreateInfoINTEL, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryPoolPerformanceQueryCreateInfoINTEL.html
  struct QueryPoolPerformanceQueryCreateInfoINTEL
  {
    using NativeType = VkQueryPoolPerformanceQueryCreateInfoINTEL;

    // allowDuplicate = false: this structure may appear at most once in a StructureChain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      QueryPoolPerformanceQueryCreateInfoINTEL( QueryPoolSamplingModeINTEL performanceCountersSampling_ = QueryPoolSamplingModeINTEL::eManual,
                                                const void *               pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , performanceCountersSampling{ performanceCountersSampling_ }
    {
    }

    VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the two types are layout-compatible.
    QueryPoolPerformanceQueryCreateInfoINTEL( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
      : QueryPoolPerformanceQueryCreateInfoINTEL( *reinterpret_cast<QueryPoolPerformanceQueryCreateInfoINTEL const *>( &rhs ) )
    {
    }

    QueryPoolPerformanceQueryCreateInfoINTEL & operator=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible reinterpret_cast.
    QueryPoolPerformanceQueryCreateInfoINTEL & operator=( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<QueryPoolPerformanceQueryCreateInfoINTEL const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this for fluent construction.
    VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL &
      setPerformanceCountersSampling( QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT
    {
      performanceCountersSampling = performanceCountersSampling_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C struct so the wrapper can be passed to C Vulkan entry points.
    operator VkQueryPoolPerformanceQueryCreateInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkQueryPoolPerformanceQueryCreateInfoINTEL *>( this );
    }

    operator VkQueryPoolPerformanceQueryCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueryPoolPerformanceQueryCreateInfoINTEL *>( this );
    }

    operator VkQueryPoolPerformanceQueryCreateInfoINTEL const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkQueryPoolPerformanceQueryCreateInfoINTEL *>( this );
    }

    operator VkQueryPoolPerformanceQueryCreateInfoINTEL *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkQueryPoolPerformanceQueryCreateInfoINTEL *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used by the reflection-based comparison below.
    std::tuple<StructureType const &, const void * const &, QueryPoolSamplingModeINTEL const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, performanceCountersSampling );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( QueryPoolPerformanceQueryCreateInfoINTEL const & ) const = default;
#else
    // Pre-C++20 fallback: member-wise equality.
    bool operator==( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( performanceCountersSampling == rhs.performanceCountersSampling );
#  endif
    }

    bool operator!=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkQueryPoolPerformanceQueryCreateInfoINTEL exactly, in declaration order — do not reorder.
    StructureType              sType                       = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL;
    const void *               pNext                       = {};
    QueryPoolSamplingModeINTEL performanceCountersSampling = QueryPoolSamplingModeINTEL::eManual;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper for generic code.
  template <>
  struct CppType<VkQueryPoolPerformanceQueryCreateInfoINTEL>
  {
    using Type = QueryPoolPerformanceQueryCreateInfoINTEL;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper for generic code.
  template <>
  struct CppType<StructureType, StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL>
  {
    using Type = QueryPoolPerformanceQueryCreateInfoINTEL;
  };

  // Legacy name kept for backward compatibility with earlier API revisions.
  using QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL;

  // wrapper struct for struct VkQueryPoolVideoEncodeFeedbackCreateInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryPoolVideoEncodeFeedbackCreateInfoKHR.html
  struct QueryPoolVideoEncodeFeedbackCreateInfoKHR
  {
    using NativeType = VkQueryPoolVideoEncodeFeedbackCreateInfoKHR;

    // allowDuplicate = false: this structure may appear at most once in a StructureChain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eQueryPoolVideoEncodeFeedbackCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR QueryPoolVideoEncodeFeedbackCreateInfoKHR( VideoEncodeFeedbackFlagsKHR encodeFeedbackFlags_ = {},
                                                                    const void *                pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , encodeFeedbackFlags{ encodeFeedbackFlags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR QueryPoolVideoEncodeFeedbackCreateInfoKHR( QueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the two types are layout-compatible.
    QueryPoolVideoEncodeFeedbackCreateInfoKHR( VkQueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : QueryPoolVideoEncodeFeedbackCreateInfoKHR( *reinterpret_cast<QueryPoolVideoEncodeFeedbackCreateInfoKHR const *>( &rhs ) )
    {
    }

    QueryPoolVideoEncodeFeedbackCreateInfoKHR & operator=( QueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible reinterpret_cast.
    QueryPoolVideoEncodeFeedbackCreateInfoKHR & operator=( VkQueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<QueryPoolVideoEncodeFeedbackCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this for fluent construction.
    VULKAN_HPP_CONSTEXPR_14 QueryPoolVideoEncodeFeedbackCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 QueryPoolVideoEncodeFeedbackCreateInfoKHR &
      setEncodeFeedbackFlags( VideoEncodeFeedbackFlagsKHR encodeFeedbackFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      encodeFeedbackFlags = encodeFeedbackFlags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C struct so the wrapper can be passed to C Vulkan entry points.
    operator VkQueryPoolVideoEncodeFeedbackCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkQueryPoolVideoEncodeFeedbackCreateInfoKHR *>( this );
    }

    operator VkQueryPoolVideoEncodeFeedbackCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueryPoolVideoEncodeFeedbackCreateInfoKHR *>( this );
    }

    operator VkQueryPoolVideoEncodeFeedbackCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkQueryPoolVideoEncodeFeedbackCreateInfoKHR *>( this );
    }

    operator VkQueryPoolVideoEncodeFeedbackCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkQueryPoolVideoEncodeFeedbackCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used by the reflection-based comparison below.
    std::tuple<StructureType const &, const void * const &, VideoEncodeFeedbackFlagsKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, encodeFeedbackFlags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( QueryPoolVideoEncodeFeedbackCreateInfoKHR const & ) const = default;
#else
    // Pre-C++20 fallback: member-wise equality.
    bool operator==( QueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( encodeFeedbackFlags == rhs.encodeFeedbackFlags );
#  endif
    }

    bool operator!=( QueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkQueryPoolVideoEncodeFeedbackCreateInfoKHR exactly, in declaration order — do not reorder.
    StructureType               sType               = StructureType::eQueryPoolVideoEncodeFeedbackCreateInfoKHR;
    const void *                pNext               = {};
    VideoEncodeFeedbackFlagsKHR encodeFeedbackFlags = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper for generic code.
  template <>
  struct CppType<VkQueryPoolVideoEncodeFeedbackCreateInfoKHR>
  {
    using Type = QueryPoolVideoEncodeFeedbackCreateInfoKHR;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper for generic code.
  template <>
  struct CppType<StructureType, StructureType::eQueryPoolVideoEncodeFeedbackCreateInfoKHR>
  {
    using Type = QueryPoolVideoEncodeFeedbackCreateInfoKHR;
  };

  // wrapper struct for struct VkQueueFamilyCheckpointProperties2NV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFamilyCheckpointProperties2NV.html
  struct QueueFamilyCheckpointProperties2NV
  {
    using NativeType = VkQueueFamilyCheckpointProperties2NV;

    // allowDuplicate = false: this structure may appear at most once in a StructureChain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eQueueFamilyCheckpointProperties2NV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( PipelineStageFlags2 checkpointExecutionStageMask_ = {},
                                                             void *              pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , checkpointExecutionStageMask{ checkpointExecutionStageMask_ }
    {
    }

    VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the two types are layout-compatible.
    QueueFamilyCheckpointProperties2NV( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT
      : QueueFamilyCheckpointProperties2NV( *reinterpret_cast<QueueFamilyCheckpointProperties2NV const *>( &rhs ) )
    {
    }

    QueueFamilyCheckpointProperties2NV & operator=( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible reinterpret_cast.
    // Note: this is an output-only properties struct, so no setters are generated for it.
    QueueFamilyCheckpointProperties2NV & operator=( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<QueueFamilyCheckpointProperties2NV const *>( &rhs );
      return *this;
    }

    // Zero-cost conversions to the native C struct so the wrapper can be passed to C Vulkan entry points.
    operator VkQueueFamilyCheckpointProperties2NV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkQueueFamilyCheckpointProperties2NV *>( this );
    }

    operator VkQueueFamilyCheckpointProperties2NV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueueFamilyCheckpointProperties2NV *>( this );
    }

    operator VkQueueFamilyCheckpointProperties2NV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkQueueFamilyCheckpointProperties2NV *>( this );
    }

    operator VkQueueFamilyCheckpointProperties2NV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkQueueFamilyCheckpointProperties2NV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used by the reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, PipelineStageFlags2 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, checkpointExecutionStageMask );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( QueueFamilyCheckpointProperties2NV const & ) const = default;
#else
    // Pre-C++20 fallback: member-wise equality.
    bool operator==( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask );
#  endif
    }

    bool operator!=( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkQueueFamilyCheckpointProperties2NV exactly, in declaration order — do not reorder.
    // pNext is non-const here because this is an output struct filled in by the implementation.
    StructureType       sType                        = StructureType::eQueueFamilyCheckpointProperties2NV;
    void *              pNext                        = {};
    PipelineStageFlags2 checkpointExecutionStageMask = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper for generic code.
  template <>
  struct CppType<VkQueueFamilyCheckpointProperties2NV>
  {
    using Type = QueueFamilyCheckpointProperties2NV;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper for generic code.
  template <>
  struct CppType<StructureType, StructureType::eQueueFamilyCheckpointProperties2NV>
  {
    using Type = QueueFamilyCheckpointProperties2NV;
  };

  // wrapper struct for struct VkQueueFamilyCheckpointPropertiesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFamilyCheckpointPropertiesNV.html
  struct QueueFamilyCheckpointPropertiesNV
  {
    using NativeType = VkQueueFamilyCheckpointPropertiesNV;

    // allowDuplicate = false: this structure may appear at most once in a StructureChain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eQueueFamilyCheckpointPropertiesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( PipelineStageFlags checkpointExecutionStageMask_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , checkpointExecutionStageMask{ checkpointExecutionStageMask_ }
    {
    }

    VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the two types are layout-compatible.
    QueueFamilyCheckpointPropertiesNV( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : QueueFamilyCheckpointPropertiesNV( *reinterpret_cast<QueueFamilyCheckpointPropertiesNV const *>( &rhs ) )
    {
    }

    QueueFamilyCheckpointPropertiesNV & operator=( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible reinterpret_cast.
    // Note: this is an output-only properties struct, so no setters are generated for it.
    QueueFamilyCheckpointPropertiesNV & operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<QueueFamilyCheckpointPropertiesNV const *>( &rhs );
      return *this;
    }

    // Zero-cost conversions to the native C struct so the wrapper can be passed to C Vulkan entry points.
    operator VkQueueFamilyCheckpointPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkQueueFamilyCheckpointPropertiesNV *>( this );
    }

    operator VkQueueFamilyCheckpointPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueueFamilyCheckpointPropertiesNV *>( this );
    }

    operator VkQueueFamilyCheckpointPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkQueueFamilyCheckpointPropertiesNV *>( this );
    }

    operator VkQueueFamilyCheckpointPropertiesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkQueueFamilyCheckpointPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used by the reflection-based comparison below.
    std::tuple<StructureType const &, void * const &, PipelineStageFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, checkpointExecutionStageMask );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( QueueFamilyCheckpointPropertiesNV const & ) const = default;
#else
    // Pre-C++20 fallback: member-wise equality.
    bool operator==( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask );
#  endif
    }

    bool operator!=( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkQueueFamilyCheckpointPropertiesNV exactly, in declaration order — do not reorder.
    // pNext is non-const here because this is an output struct filled in by the implementation.
    StructureType      sType                        = StructureType::eQueueFamilyCheckpointPropertiesNV;
    void *             pNext                        = {};
    PipelineStageFlags checkpointExecutionStageMask = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper for generic code.
  template <>
  struct CppType<VkQueueFamilyCheckpointPropertiesNV>
  {
    using Type = QueueFamilyCheckpointPropertiesNV;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper for generic code.
  template <>
  struct CppType<StructureType, StructureType::eQueueFamilyCheckpointPropertiesNV>
  {
    using Type = QueueFamilyCheckpointPropertiesNV;
  };

  // wrapper struct for struct VkQueueFamilyDataGraphProcessingEnginePropertiesARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFamilyDataGraphProcessingEnginePropertiesARM.html
  struct QueueFamilyDataGraphProcessingEnginePropertiesARM
  {
    using NativeType = VkQueueFamilyDataGraphProcessingEnginePropertiesARM;

    // allowDuplicate = false: this structure may appear at most once in a StructureChain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eQueueFamilyDataGraphProcessingEnginePropertiesARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR QueueFamilyDataGraphProcessingEnginePropertiesARM( ExternalSemaphoreHandleTypeFlags foreignSemaphoreHandleTypes_ = {},
                                                                            ExternalMemoryHandleTypeFlags    foreignMemoryHandleTypes_    = {},
                                                                            const void *                     pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , foreignSemaphoreHandleTypes{ foreignSemaphoreHandleTypes_ }
      , foreignMemoryHandleTypes{ foreignMemoryHandleTypes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      QueueFamilyDataGraphProcessingEnginePropertiesARM( QueueFamilyDataGraphProcessingEnginePropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the two types are layout-compatible.
    QueueFamilyDataGraphProcessingEnginePropertiesARM( VkQueueFamilyDataGraphProcessingEnginePropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : QueueFamilyDataGraphProcessingEnginePropertiesARM( *reinterpret_cast<QueueFamilyDataGraphProcessingEnginePropertiesARM const *>( &rhs ) )
    {
    }

    QueueFamilyDataGraphProcessingEnginePropertiesARM &
      operator=( QueueFamilyDataGraphProcessingEnginePropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible reinterpret_cast.
    QueueFamilyDataGraphProcessingEnginePropertiesARM & operator=( VkQueueFamilyDataGraphProcessingEnginePropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<QueueFamilyDataGraphProcessingEnginePropertiesARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this for fluent construction.
    VULKAN_HPP_CONSTEXPR_14 QueueFamilyDataGraphProcessingEnginePropertiesARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 QueueFamilyDataGraphProcessingEnginePropertiesARM &
      setForeignSemaphoreHandleTypes( ExternalSemaphoreHandleTypeFlags foreignSemaphoreHandleTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      foreignSemaphoreHandleTypes = foreignSemaphoreHandleTypes_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 QueueFamilyDataGraphProcessingEnginePropertiesARM &
      setForeignMemoryHandleTypes( ExternalMemoryHandleTypeFlags foreignMemoryHandleTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      foreignMemoryHandleTypes = foreignMemoryHandleTypes_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C struct so the wrapper can be passed to C Vulkan entry points.
    operator VkQueueFamilyDataGraphProcessingEnginePropertiesARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkQueueFamilyDataGraphProcessingEnginePropertiesARM *>( this );
    }

    operator VkQueueFamilyDataGraphProcessingEnginePropertiesARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueueFamilyDataGraphProcessingEnginePropertiesARM *>( this );
    }

    operator VkQueueFamilyDataGraphProcessingEnginePropertiesARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkQueueFamilyDataGraphProcessingEnginePropertiesARM *>( this );
    }

    operator VkQueueFamilyDataGraphProcessingEnginePropertiesARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkQueueFamilyDataGraphProcessingEnginePropertiesARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used by the reflection-based comparison below.
    std::tuple<StructureType const &, const void * const &, ExternalSemaphoreHandleTypeFlags const &, ExternalMemoryHandleTypeFlags const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, foreignSemaphoreHandleTypes, foreignMemoryHandleTypes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( QueueFamilyDataGraphProcessingEnginePropertiesARM const & ) const = default;
#else
    // Pre-C++20 fallback: member-wise equality.
    bool operator==( QueueFamilyDataGraphProcessingEnginePropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( foreignSemaphoreHandleTypes == rhs.foreignSemaphoreHandleTypes ) &&
             ( foreignMemoryHandleTypes == rhs.foreignMemoryHandleTypes );
#  endif
    }

    bool operator!=( QueueFamilyDataGraphProcessingEnginePropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkQueueFamilyDataGraphProcessingEnginePropertiesARM exactly, in declaration order — do not reorder.
    StructureType                    sType                       = StructureType::eQueueFamilyDataGraphProcessingEnginePropertiesARM;
    const void *                     pNext                       = {};
    ExternalSemaphoreHandleTypeFlags foreignSemaphoreHandleTypes = {};
    ExternalMemoryHandleTypeFlags    foreignMemoryHandleTypes    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to this C++ wrapper for generic code.
  template <>
  struct CppType<VkQueueFamilyDataGraphProcessingEnginePropertiesARM>
  {
    using Type = QueueFamilyDataGraphProcessingEnginePropertiesARM;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper for generic code.
  template <>
  struct CppType<StructureType, StructureType::eQueueFamilyDataGraphProcessingEnginePropertiesARM>
  {
    using Type = QueueFamilyDataGraphProcessingEnginePropertiesARM;
  };

  // wrapper struct for struct VkQueueFamilyDataGraphPropertiesARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFamilyDataGraphPropertiesARM.html
  // C++ wrapper for VkQueueFamilyDataGraphPropertiesARM. The wrapper is layout-compatible with the
  // native struct, which is what makes the reinterpret_cast based conversions below valid.
  struct QueueFamilyDataGraphPropertiesARM
  {
    using NativeType = VkQueueFamilyDataGraphPropertiesARM;

    // At most one instance of this struct may appear in a given pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eQueueFamilyDataGraphPropertiesARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer at the bottom of the struct.
    VULKAN_HPP_CONSTEXPR_14 QueueFamilyDataGraphPropertiesARM( PhysicalDeviceDataGraphProcessingEngineARM engine_    = {},
                                                               PhysicalDeviceDataGraphOperationSupportARM operation_ = {},
                                                               const void *                               pNext_     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , engine{ engine_ }
      , operation{ operation_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 QueueFamilyDataGraphPropertiesARM( QueueFamilyDataGraphPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native struct by reinterpreting it as the layout-compatible wrapper.
    QueueFamilyDataGraphPropertiesARM( VkQueueFamilyDataGraphPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : QueueFamilyDataGraphPropertiesARM( *reinterpret_cast<QueueFamilyDataGraphPropertiesARM const *>( &rhs ) )
    {
    }

    QueueFamilyDataGraphPropertiesARM & operator=( QueueFamilyDataGraphPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native struct (layout-compatible reinterpretation).
    QueueFamilyDataGraphPropertiesARM & operator=( VkQueueFamilyDataGraphPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<QueueFamilyDataGraphPropertiesARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, each returning *this.
    VULKAN_HPP_CONSTEXPR_14 QueueFamilyDataGraphPropertiesARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 QueueFamilyDataGraphPropertiesARM & setEngine( PhysicalDeviceDataGraphProcessingEngineARM const & engine_ ) VULKAN_HPP_NOEXCEPT
    {
      engine = engine_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 QueueFamilyDataGraphPropertiesARM &
      setOperation( PhysicalDeviceDataGraphOperationSupportARM const & operation_ ) VULKAN_HPP_NOEXCEPT
    {
      operation = operation_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type, by reference and by pointer.
    operator VkQueueFamilyDataGraphPropertiesARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkQueueFamilyDataGraphPropertiesARM *>( this );
    }

    operator VkQueueFamilyDataGraphPropertiesARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueueFamilyDataGraphPropertiesARM *>( this );
    }

    operator VkQueueFamilyDataGraphPropertiesARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkQueueFamilyDataGraphPropertiesARM *>( this );
    }

    operator VkQueueFamilyDataGraphPropertiesARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkQueueFamilyDataGraphPropertiesARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used by the reflection-based operator==.
    std::
      tuple<StructureType const &, const void * const &, PhysicalDeviceDataGraphProcessingEngineARM const &, PhysicalDeviceDataGraphOperationSupportARM const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, engine, operation );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( QueueFamilyDataGraphPropertiesARM const & ) const = default;
#else
    // Member-wise equality; pNext is compared by pointer value, not by chain contents.
    bool operator==( QueueFamilyDataGraphPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( engine == rhs.engine ) && ( operation == rhs.operation );
#  endif
    }

    bool operator!=( QueueFamilyDataGraphPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                              sType     = StructureType::eQueueFamilyDataGraphPropertiesARM;
    const void *                               pNext     = {};
    PhysicalDeviceDataGraphProcessingEngineARM engine    = {};
    PhysicalDeviceDataGraphOperationSupportARM operation = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 only: maps the native C struct type to its C++ wrapper type.
  template <>
  struct CppType<VkQueueFamilyDataGraphPropertiesARM>
  {
    using Type = QueueFamilyDataGraphPropertiesARM;
  };
#endif

  // Maps the corresponding StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eQueueFamilyDataGraphPropertiesARM>
  {
    using Type = QueueFamilyDataGraphPropertiesARM;
  };

  // wrapper struct for struct VkQueueFamilyGlobalPriorityProperties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFamilyGlobalPriorityProperties.html
  // C++ wrapper for VkQueueFamilyGlobalPriorityProperties. Layout-compatible with the native struct,
  // which is what makes the reinterpret_cast based conversions below valid. Note that `priorities`
  // is a fixed-size array of which only the first `priorityCount` entries are significant — the
  // comparison operators below honor that, so equality cannot simply be defaulted.
  struct QueueFamilyGlobalPriorityProperties
  {
    using NativeType = VkQueueFamilyGlobalPriorityProperties;

    // At most one instance of this struct may appear in a given pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eQueueFamilyGlobalPriorityProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all VK_MAX_GLOBAL_PRIORITY_SIZE entries default to QueueGlobalPriority::eLow.
    VULKAN_HPP_CONSTEXPR_14
      QueueFamilyGlobalPriorityProperties( uint32_t                                                             priorityCount_ = {},
                                           std::array<QueueGlobalPriority, VK_MAX_GLOBAL_PRIORITY_SIZE> const & priorities_    = { { QueueGlobalPriority::eLow,
                                                                                                                                     QueueGlobalPriority::eLow,
                                                                                                                                     QueueGlobalPriority::eLow,
                                                                                                                                     QueueGlobalPriority::eLow,
                                                                                                                                     QueueGlobalPriority::eLow,
                                                                                                                                     QueueGlobalPriority::eLow,
                                                                                                                                     QueueGlobalPriority::eLow,
                                                                                                                                     QueueGlobalPriority::eLow,
                                                                                                                                     QueueGlobalPriority::eLow,
                                                                                                                                     QueueGlobalPriority::eLow,
                                                                                                                                     QueueGlobalPriority::eLow,
                                                                                                                                     QueueGlobalPriority::eLow,
                                                                                                                                     QueueGlobalPriority::eLow,
                                                                                                                                     QueueGlobalPriority::eLow,
                                                                                                                                     QueueGlobalPriority::eLow,
                                                                                                                                     QueueGlobalPriority::eLow } },
                                           void *                                                               pNext_         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , priorityCount{ priorityCount_ }
      , priorities{ priorities_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityProperties( QueueFamilyGlobalPriorityProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native struct by reinterpreting it as the layout-compatible wrapper.
    QueueFamilyGlobalPriorityProperties( VkQueueFamilyGlobalPriorityProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : QueueFamilyGlobalPriorityProperties( *reinterpret_cast<QueueFamilyGlobalPriorityProperties const *>( &rhs ) )
    {
    }

    QueueFamilyGlobalPriorityProperties & operator=( QueueFamilyGlobalPriorityProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native struct (layout-compatible reinterpretation).
    QueueFamilyGlobalPriorityProperties & operator=( VkQueueFamilyGlobalPriorityProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<QueueFamilyGlobalPriorityProperties const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native type, by reference and by pointer.
    operator VkQueueFamilyGlobalPriorityProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkQueueFamilyGlobalPriorityProperties *>( this );
    }

    operator VkQueueFamilyGlobalPriorityProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueueFamilyGlobalPriorityProperties *>( this );
    }

    operator VkQueueFamilyGlobalPriorityProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkQueueFamilyGlobalPriorityProperties *>( this );
    }

    operator VkQueueFamilyGlobalPriorityProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkQueueFamilyGlobalPriorityProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; note this covers the whole priorities array,
    // not just the first priorityCount entries.
    std::tuple<StructureType const &, void * const &, uint32_t const &, ArrayWrapper1D<QueueGlobalPriority, VK_MAX_GLOBAL_PRIORITY_SIZE> const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, priorityCount, priorities );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written ordering: a defaulted <=> would compare all VK_MAX_GLOBAL_PRIORITY_SIZE array
    // entries, but only the first priorityCount entries are meaningful.
    std::strong_ordering operator<=>( QueueFamilyGlobalPriorityProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = priorityCount <=> rhs.priorityCount; cmp != 0 )
        return cmp;
      for ( size_t i = 0; i < priorityCount; ++i )
      {
        if ( auto cmp = priorities[i] <=> rhs.priorities[i]; cmp != 0 )
          return cmp;
      }

      return std::strong_ordering::equivalent;
    }
#endif

    // Defined unconditionally (not derived from <=>); compares only the first priorityCount
    // array entries via memcmp, mirroring the ordering operator above.
    bool operator==( QueueFamilyGlobalPriorityProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priorityCount == rhs.priorityCount ) &&
             ( memcmp( priorities, rhs.priorities, priorityCount * sizeof( QueueGlobalPriority ) ) == 0 );
    }

    bool operator!=( QueueFamilyGlobalPriorityProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    StructureType                                                    sType         = StructureType::eQueueFamilyGlobalPriorityProperties;
    void *                                                           pNext         = {};
    uint32_t                                                         priorityCount = {};
    ArrayWrapper1D<QueueGlobalPriority, VK_MAX_GLOBAL_PRIORITY_SIZE> priorities    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 only: maps the native C struct type to its C++ wrapper type.
  template <>
  struct CppType<VkQueueFamilyGlobalPriorityProperties>
  {
    using Type = QueueFamilyGlobalPriorityProperties;
  };
#endif

  // Maps the corresponding StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eQueueFamilyGlobalPriorityProperties>
  {
    using Type = QueueFamilyGlobalPriorityProperties;
  };

  // Extension-suffixed aliases, kept for source compatibility with code written against the
  // EXT/KHR struct names.
  using QueueFamilyGlobalPriorityPropertiesEXT = QueueFamilyGlobalPriorityProperties;
  using QueueFamilyGlobalPriorityPropertiesKHR = QueueFamilyGlobalPriorityProperties;

  // wrapper struct for struct VkQueueFamilyOwnershipTransferPropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFamilyOwnershipTransferPropertiesKHR.html
  struct QueueFamilyOwnershipTransferPropertiesKHR
  {
    using NativeType = VkQueueFamilyOwnershipTransferPropertiesKHR;

    // At most one instance of this struct may appear in a given pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eQueueFamilyOwnershipTransferPropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer at the bottom of the struct.
    VULKAN_HPP_CONSTEXPR QueueFamilyOwnershipTransferPropertiesKHR( uint32_t optimalImageTransferToQueueFamilies_ = {},
                                                                    void *   pNext_                               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , optimalImageTransferToQueueFamilies( optimalImageTransferToQueueFamilies_ )
    {
    }

    VULKAN_HPP_CONSTEXPR QueueFamilyOwnershipTransferPropertiesKHR( QueueFamilyOwnershipTransferPropertiesKHR const & ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native struct; the wrapper is layout-compatible with it.
    QueueFamilyOwnershipTransferPropertiesKHR( VkQueueFamilyOwnershipTransferPropertiesKHR const & native ) VULKAN_HPP_NOEXCEPT
      : QueueFamilyOwnershipTransferPropertiesKHR( *reinterpret_cast<QueueFamilyOwnershipTransferPropertiesKHR const *>( &native ) )
    {
    }

    QueueFamilyOwnershipTransferPropertiesKHR & operator=( QueueFamilyOwnershipTransferPropertiesKHR const & ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native struct (layout-compatible reinterpretation).
    QueueFamilyOwnershipTransferPropertiesKHR & operator=( VkQueueFamilyOwnershipTransferPropertiesKHR const & native ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<QueueFamilyOwnershipTransferPropertiesKHR const *>( &native );
      return *this;
    }

    // Implicit conversions to the native type, by reference and by pointer.
    operator VkQueueFamilyOwnershipTransferPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueueFamilyOwnershipTransferPropertiesKHR const *>( this );
    }

    operator VkQueueFamilyOwnershipTransferPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueueFamilyOwnershipTransferPropertiesKHR *>( this );
    }

    operator VkQueueFamilyOwnershipTransferPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkQueueFamilyOwnershipTransferPropertiesKHR const *>( this );
    }

    operator VkQueueFamilyOwnershipTransferPropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkQueueFamilyOwnershipTransferPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used by the reflection-based operator==.
    std::tuple<StructureType const &, void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, optimalImageTransferToQueueFamilies );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( QueueFamilyOwnershipTransferPropertiesKHR const & ) const = default;
#else
    // Member-wise equality; pNext is compared by pointer value, not by chain contents.
    bool operator==( QueueFamilyOwnershipTransferPropertiesKHR const & other ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return reflect() == other.reflect();
#  else
      return ( sType == other.sType ) && ( pNext == other.pNext ) && ( optimalImageTransferToQueueFamilies == other.optimalImageTransferToQueueFamilies );
#  endif
    }

    bool operator!=( QueueFamilyOwnershipTransferPropertiesKHR const & other ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == other );
    }
#endif

  public:
    StructureType sType                               = StructureType::eQueueFamilyOwnershipTransferPropertiesKHR;
    void *        pNext                               = {};
    uint32_t      optimalImageTransferToQueueFamilies = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 only: maps the native C struct type to its C++ wrapper type.
  template <>
  struct CppType<VkQueueFamilyOwnershipTransferPropertiesKHR>
  {
    using Type = QueueFamilyOwnershipTransferPropertiesKHR;
  };
#endif

  // Maps the corresponding StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eQueueFamilyOwnershipTransferPropertiesKHR>
  {
    using Type = QueueFamilyOwnershipTransferPropertiesKHR;
  };

  // wrapper struct for struct VkQueueFamilyProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFamilyProperties.html
  struct QueueFamilyProperties
  {
    using NativeType = VkQueueFamilyProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor covering all four members.
    VULKAN_HPP_CONSTEXPR QueueFamilyProperties( QueueFlags queueFlags_                  = {},
                                                uint32_t   queueCount_                  = {},
                                                uint32_t   timestampValidBits_          = {},
                                                Extent3D   minImageTransferGranularity_ = {} ) VULKAN_HPP_NOEXCEPT
      : queueFlags( queueFlags_ )
      , queueCount( queueCount_ )
      , timestampValidBits( timestampValidBits_ )
      , minImageTransferGranularity( minImageTransferGranularity_ )
    {
    }

    VULKAN_HPP_CONSTEXPR QueueFamilyProperties( QueueFamilyProperties const & ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native struct; the wrapper is layout-compatible with it.
    QueueFamilyProperties( VkQueueFamilyProperties const & native ) VULKAN_HPP_NOEXCEPT
      : QueueFamilyProperties( *reinterpret_cast<QueueFamilyProperties const *>( &native ) )
    {
    }

    QueueFamilyProperties & operator=( QueueFamilyProperties const & ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native struct (layout-compatible reinterpretation).
    QueueFamilyProperties & operator=( VkQueueFamilyProperties const & native ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<QueueFamilyProperties const *>( &native );
      return *this;
    }

    // Implicit conversions to the native type, by reference and by pointer.
    operator VkQueueFamilyProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueueFamilyProperties const *>( this );
    }

    operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueueFamilyProperties *>( this );
    }

    operator VkQueueFamilyProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkQueueFamilyProperties const *>( this );
    }

    operator VkQueueFamilyProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkQueueFamilyProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used by the reflection-based operator==.
    std::tuple<QueueFlags const &, uint32_t const &, uint32_t const &, Extent3D const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( queueFlags, queueCount, timestampValidBits, minImageTransferGranularity );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( QueueFamilyProperties const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( QueueFamilyProperties const & other ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return reflect() == other.reflect();
#  else
      return ( queueFlags == other.queueFlags ) && ( queueCount == other.queueCount ) && ( timestampValidBits == other.timestampValidBits ) &&
             ( minImageTransferGranularity == other.minImageTransferGranularity );
#  endif
    }

    bool operator!=( QueueFamilyProperties const & other ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == other );
    }
#endif

  public:
    QueueFlags queueFlags                  = {};
    uint32_t   queueCount                  = {};
    uint32_t   timestampValidBits          = {};
    Extent3D   minImageTransferGranularity = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 only: maps the native C struct type to its C++ wrapper type.
  template <>
  struct CppType<VkQueueFamilyProperties>
  {
    using Type = QueueFamilyProperties;
  };
#endif

  // wrapper struct for struct VkQueueFamilyProperties2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFamilyProperties2.html
  // C++ wrapper for VkQueueFamilyProperties2: extensible (sType/pNext-chained) variant embedding a
  // QueueFamilyProperties. Layout-compatible with the native struct, which is what makes the
  // reinterpret_cast based conversions below valid.
  struct QueueFamilyProperties2
  {
    using NativeType = VkQueueFamilyProperties2;

    // At most one instance of this struct may appear in a given pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eQueueFamilyProperties2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer at the bottom of the struct.
    VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( QueueFamilyProperties queueFamilyProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , queueFamilyProperties{ queueFamilyProperties_ }
    {
    }

    VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native struct by reinterpreting it as the layout-compatible wrapper.
    QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : QueueFamilyProperties2( *reinterpret_cast<QueueFamilyProperties2 const *>( &rhs ) )
    {
    }

    QueueFamilyProperties2 & operator=( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native struct (layout-compatible reinterpretation).
    QueueFamilyProperties2 & operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<QueueFamilyProperties2 const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native type, by reference and by pointer.
    operator VkQueueFamilyProperties2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkQueueFamilyProperties2 *>( this );
    }

    operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueueFamilyProperties2 *>( this );
    }

    operator VkQueueFamilyProperties2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkQueueFamilyProperties2 *>( this );
    }

    operator VkQueueFamilyProperties2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkQueueFamilyProperties2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used by the reflection-based operator==.
    std::tuple<StructureType const &, void * const &, QueueFamilyProperties const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, queueFamilyProperties );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( QueueFamilyProperties2 const & ) const = default;
#else
    // Member-wise equality; pNext is compared by pointer value, not by chain contents.
    bool operator==( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyProperties == rhs.queueFamilyProperties );
#  endif
    }

    bool operator!=( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType         sType                 = StructureType::eQueueFamilyProperties2;
    void *                pNext                 = {};
    QueueFamilyProperties queueFamilyProperties = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 only: maps the native C struct type to its C++ wrapper type.
  template <>
  struct CppType<VkQueueFamilyProperties2>
  {
    using Type = QueueFamilyProperties2;
  };
#endif

  // Maps the corresponding StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eQueueFamilyProperties2>
  {
    using Type = QueueFamilyProperties2;
  };

  // Extension-suffixed alias, kept for source compatibility with code written against the KHR name.
  using QueueFamilyProperties2KHR = QueueFamilyProperties2;

  // wrapper struct for struct VkQueueFamilyQueryResultStatusPropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFamilyQueryResultStatusPropertiesKHR.html
  struct QueueFamilyQueryResultStatusPropertiesKHR
  {
    using NativeType = VkQueueFamilyQueryResultStatusPropertiesKHR;

    // At most one instance of this struct may appear in a given pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eQueueFamilyQueryResultStatusPropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer at the bottom of the struct.
    VULKAN_HPP_CONSTEXPR QueueFamilyQueryResultStatusPropertiesKHR( Bool32 queryResultStatusSupport_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , queryResultStatusSupport( queryResultStatusSupport_ )
    {
    }

    VULKAN_HPP_CONSTEXPR QueueFamilyQueryResultStatusPropertiesKHR( QueueFamilyQueryResultStatusPropertiesKHR const & ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native struct; the wrapper is layout-compatible with it.
    QueueFamilyQueryResultStatusPropertiesKHR( VkQueueFamilyQueryResultStatusPropertiesKHR const & native ) VULKAN_HPP_NOEXCEPT
      : QueueFamilyQueryResultStatusPropertiesKHR( *reinterpret_cast<QueueFamilyQueryResultStatusPropertiesKHR const *>( &native ) )
    {
    }

    QueueFamilyQueryResultStatusPropertiesKHR & operator=( QueueFamilyQueryResultStatusPropertiesKHR const & ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native struct (layout-compatible reinterpretation).
    QueueFamilyQueryResultStatusPropertiesKHR & operator=( VkQueueFamilyQueryResultStatusPropertiesKHR const & native ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<QueueFamilyQueryResultStatusPropertiesKHR const *>( &native );
      return *this;
    }

    // Implicit conversions to the native type, by reference and by pointer.
    operator VkQueueFamilyQueryResultStatusPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueueFamilyQueryResultStatusPropertiesKHR const *>( this );
    }

    operator VkQueueFamilyQueryResultStatusPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueueFamilyQueryResultStatusPropertiesKHR *>( this );
    }

    operator VkQueueFamilyQueryResultStatusPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkQueueFamilyQueryResultStatusPropertiesKHR const *>( this );
    }

    operator VkQueueFamilyQueryResultStatusPropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkQueueFamilyQueryResultStatusPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used by the reflection-based operator==.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, queryResultStatusSupport );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( QueueFamilyQueryResultStatusPropertiesKHR const & ) const = default;
#else
    // Member-wise equality; pNext is compared by pointer value, not by chain contents.
    bool operator==( QueueFamilyQueryResultStatusPropertiesKHR const & other ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return reflect() == other.reflect();
#  else
      return ( sType == other.sType ) && ( pNext == other.pNext ) && ( queryResultStatusSupport == other.queryResultStatusSupport );
#  endif
    }

    bool operator!=( QueueFamilyQueryResultStatusPropertiesKHR const & other ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == other );
    }
#endif

  public:
    StructureType sType                    = StructureType::eQueueFamilyQueryResultStatusPropertiesKHR;
    void *        pNext                    = {};
    Bool32        queryResultStatusSupport = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 only: maps the native C struct type to its C++ wrapper type.
  template <>
  struct CppType<VkQueueFamilyQueryResultStatusPropertiesKHR>
  {
    using Type = QueueFamilyQueryResultStatusPropertiesKHR;
  };
#endif

  // Maps the corresponding StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eQueueFamilyQueryResultStatusPropertiesKHR>
  {
    using Type = QueueFamilyQueryResultStatusPropertiesKHR;
  };

  // wrapper struct for struct VkQueueFamilyVideoPropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFamilyVideoPropertiesKHR.html
  // C++ wrapper for VkQueueFamilyVideoPropertiesKHR. Layout-compatible with the native struct,
  // which is what makes the reinterpret_cast based conversions below valid.
  struct QueueFamilyVideoPropertiesKHR
  {
    using NativeType = VkQueueFamilyVideoPropertiesKHR;

    // At most one instance of this struct may appear in a given pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eQueueFamilyVideoPropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer at the bottom of the struct.
    VULKAN_HPP_CONSTEXPR QueueFamilyVideoPropertiesKHR( VideoCodecOperationFlagsKHR videoCodecOperations_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , videoCodecOperations{ videoCodecOperations_ }
    {
    }

    VULKAN_HPP_CONSTEXPR QueueFamilyVideoPropertiesKHR( QueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native struct by reinterpreting it as the layout-compatible wrapper.
    QueueFamilyVideoPropertiesKHR( VkQueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : QueueFamilyVideoPropertiesKHR( *reinterpret_cast<QueueFamilyVideoPropertiesKHR const *>( &rhs ) )
    {
    }

    QueueFamilyVideoPropertiesKHR & operator=( QueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native struct (layout-compatible reinterpretation).
    QueueFamilyVideoPropertiesKHR & operator=( VkQueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<QueueFamilyVideoPropertiesKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native type, by reference and by pointer.
    operator VkQueueFamilyVideoPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkQueueFamilyVideoPropertiesKHR *>( this );
    }

    operator VkQueueFamilyVideoPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueueFamilyVideoPropertiesKHR *>( this );
    }

    operator VkQueueFamilyVideoPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkQueueFamilyVideoPropertiesKHR *>( this );
    }

    operator VkQueueFamilyVideoPropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkQueueFamilyVideoPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used by the reflection-based operator==.
    std::tuple<StructureType const &, void * const &, VideoCodecOperationFlagsKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, videoCodecOperations );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( QueueFamilyVideoPropertiesKHR const & ) const = default;
#else
    // Member-wise equality; pNext is compared by pointer value, not by chain contents.
    bool operator==( QueueFamilyVideoPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoCodecOperations == rhs.videoCodecOperations );
#  endif
    }

    bool operator!=( QueueFamilyVideoPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType               sType                = StructureType::eQueueFamilyVideoPropertiesKHR;
    void *                      pNext                = {};
    VideoCodecOperationFlagsKHR videoCodecOperations = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 only: maps the native C struct type to its C++ wrapper type.
  template <>
  struct CppType<VkQueueFamilyVideoPropertiesKHR>
  {
    using Type = QueueFamilyVideoPropertiesKHR;
  };
#endif

  // Maps the corresponding StructureType enumerant to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eQueueFamilyVideoPropertiesKHR>
  {
    using Type = QueueFamilyVideoPropertiesKHR;
  };

  // wrapper struct for struct VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV.html
  struct RayTracingPipelineClusterAccelerationStructureCreateInfoNV
  {
    using NativeType = VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRayTracingPipelineClusterAccelerationStructureCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR RayTracingPipelineClusterAccelerationStructureCreateInfoNV( Bool32 allowClusterAccelerationStructure_ = {},
                                                                                     void * pNext_                             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , allowClusterAccelerationStructure{ allowClusterAccelerationStructure_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RayTracingPipelineClusterAccelerationStructureCreateInfoNV( RayTracingPipelineClusterAccelerationStructureCreateInfoNV const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    RayTracingPipelineClusterAccelerationStructureCreateInfoNV( VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : RayTracingPipelineClusterAccelerationStructureCreateInfoNV(
          *reinterpret_cast<RayTracingPipelineClusterAccelerationStructureCreateInfoNV const *>( &rhs ) )
    {
    }

    RayTracingPipelineClusterAccelerationStructureCreateInfoNV &
      operator=( RayTracingPipelineClusterAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    RayTracingPipelineClusterAccelerationStructureCreateInfoNV &
      operator=( VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RayTracingPipelineClusterAccelerationStructureCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineClusterAccelerationStructureCreateInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineClusterAccelerationStructureCreateInfoNV &
      setAllowClusterAccelerationStructure( Bool32 allowClusterAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
    {
      allowClusterAccelerationStructure = allowClusterAccelerationStructure_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV *>( this );
    }

    operator VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV *>( this );
    }

    operator VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV *>( this );
    }

    operator VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, allowClusterAccelerationStructure );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RayTracingPipelineClusterAccelerationStructureCreateInfoNV const & ) const = default;
#else
    bool operator==( RayTracingPipelineClusterAccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allowClusterAccelerationStructure == rhs.allowClusterAccelerationStructure );
#  endif
    }

    bool operator!=( RayTracingPipelineClusterAccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                             = StructureType::eRayTracingPipelineClusterAccelerationStructureCreateInfoNV;
    void *        pNext                             = {};
    Bool32        allowClusterAccelerationStructure = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV>
  {
    using Type = RayTracingPipelineClusterAccelerationStructureCreateInfoNV;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eRayTracingPipelineClusterAccelerationStructureCreateInfoNV>
  {
    using Type = RayTracingPipelineClusterAccelerationStructureCreateInfoNV;
  };

  // wrapper struct for struct VkRayTracingShaderGroupCreateInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRayTracingShaderGroupCreateInfoKHR.html
  struct RayTracingShaderGroupCreateInfoKHR
  {
    using NativeType = VkRayTracingShaderGroupCreateInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRayTracingShaderGroupCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( RayTracingShaderGroupTypeKHR type_               = RayTracingShaderGroupTypeKHR::eGeneral,
                                                             uint32_t                     generalShader_      = ShaderUnusedKHR,
                                                             uint32_t                     closestHitShader_   = ShaderUnusedKHR,
                                                             uint32_t                     anyHitShader_       = ShaderUnusedKHR,
                                                             uint32_t                     intersectionShader_ = ShaderUnusedKHR,
                                                             const void *                 pShaderGroupCaptureReplayHandle_ = {},
                                                             const void *                 pNext_                           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , type{ type_ }
      , generalShader{ generalShader_ }
      , closestHitShader{ closestHitShader_ }
      , anyHitShader{ anyHitShader_ }
      , intersectionShader{ intersectionShader_ }
      , pShaderGroupCaptureReplayHandle{ pShaderGroupCaptureReplayHandle_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RayTracingShaderGroupCreateInfoKHR( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : RayTracingShaderGroupCreateInfoKHR( *reinterpret_cast<RayTracingShaderGroupCreateInfoKHR const *>( &rhs ) )
    {
    }

    RayTracingShaderGroupCreateInfoKHR & operator=( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    RayTracingShaderGroupCreateInfoKHR & operator=( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RayTracingShaderGroupCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setType( RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT
    {
      generalShader = generalShader_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT
    {
      closestHitShader = closestHitShader_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT
    {
      anyHitShader = anyHitShader_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT
    {
      intersectionShader = intersectionShader_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR &
      setPShaderGroupCaptureReplayHandle( const void * pShaderGroupCaptureReplayHandle_ ) VULKAN_HPP_NOEXCEPT
    {
      pShaderGroupCaptureReplayHandle = pShaderGroupCaptureReplayHandle_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkRayTracingShaderGroupCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRayTracingShaderGroupCreateInfoKHR *>( this );
    }

    operator VkRayTracingShaderGroupCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRayTracingShaderGroupCreateInfoKHR *>( this );
    }

    operator VkRayTracingShaderGroupCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRayTracingShaderGroupCreateInfoKHR *>( this );
    }

    operator VkRayTracingShaderGroupCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRayTracingShaderGroupCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &,
               const void * const &,
               RayTracingShaderGroupTypeKHR const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               const void * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, type, generalShader, closestHitShader, anyHitShader, intersectionShader, pShaderGroupCaptureReplayHandle );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RayTracingShaderGroupCreateInfoKHR const & ) const = default;
#else
    bool operator==( RayTracingShaderGroupCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( generalShader == rhs.generalShader ) &&
             ( closestHitShader == rhs.closestHitShader ) && ( anyHitShader == rhs.anyHitShader ) && ( intersectionShader == rhs.intersectionShader ) &&
             ( pShaderGroupCaptureReplayHandle == rhs.pShaderGroupCaptureReplayHandle );
#  endif
    }

    bool operator!=( RayTracingShaderGroupCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                sType                           = StructureType::eRayTracingShaderGroupCreateInfoKHR;
    const void *                 pNext                           = {};
    RayTracingShaderGroupTypeKHR type                            = RayTracingShaderGroupTypeKHR::eGeneral;
    uint32_t                     generalShader                   = ShaderUnusedKHR;
    uint32_t                     closestHitShader                = ShaderUnusedKHR;
    uint32_t                     anyHitShader                    = ShaderUnusedKHR;
    uint32_t                     intersectionShader              = ShaderUnusedKHR;
    const void *                 pShaderGroupCaptureReplayHandle = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkRayTracingShaderGroupCreateInfoKHR>
  {
    using Type = RayTracingShaderGroupCreateInfoKHR;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eRayTracingShaderGroupCreateInfoKHR>
  {
    using Type = RayTracingShaderGroupCreateInfoKHR;
  };

  // wrapper struct for struct VkRayTracingPipelineInterfaceCreateInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRayTracingPipelineInterfaceCreateInfoKHR.html
  struct RayTracingPipelineInterfaceCreateInfoKHR
  {
    using NativeType = VkRayTracingPipelineInterfaceCreateInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( uint32_t     maxPipelineRayPayloadSize_      = {},
                                                                   uint32_t     maxPipelineRayHitAttributeSize_ = {},
                                                                   const void * pNext_                          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxPipelineRayPayloadSize{ maxPipelineRayPayloadSize_ }
      , maxPipelineRayHitAttributeSize{ maxPipelineRayHitAttributeSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RayTracingPipelineInterfaceCreateInfoKHR( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : RayTracingPipelineInterfaceCreateInfoKHR( *reinterpret_cast<RayTracingPipelineInterfaceCreateInfoKHR const *>( &rhs ) )
    {
    }

    RayTracingPipelineInterfaceCreateInfoKHR & operator=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    RayTracingPipelineInterfaceCreateInfoKHR & operator=( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RayTracingPipelineInterfaceCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & setMaxPipelineRayPayloadSize( uint32_t maxPipelineRayPayloadSize_ ) VULKAN_HPP_NOEXCEPT
    {
      maxPipelineRayPayloadSize = maxPipelineRayPayloadSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR &
      setMaxPipelineRayHitAttributeSize( uint32_t maxPipelineRayHitAttributeSize_ ) VULKAN_HPP_NOEXCEPT
    {
      maxPipelineRayHitAttributeSize = maxPipelineRayHitAttributeSize_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkRayTracingPipelineInterfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRayTracingPipelineInterfaceCreateInfoKHR *>( this );
    }

    operator VkRayTracingPipelineInterfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRayTracingPipelineInterfaceCreateInfoKHR *>( this );
    }

    operator VkRayTracingPipelineInterfaceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRayTracingPipelineInterfaceCreateInfoKHR *>( this );
    }

    operator VkRayTracingPipelineInterfaceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRayTracingPipelineInterfaceCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxPipelineRayPayloadSize, maxPipelineRayHitAttributeSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RayTracingPipelineInterfaceCreateInfoKHR const & ) const = default;
#else
    bool operator==( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPipelineRayPayloadSize == rhs.maxPipelineRayPayloadSize ) &&
             ( maxPipelineRayHitAttributeSize == rhs.maxPipelineRayHitAttributeSize );
#  endif
    }

    bool operator!=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                          = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR;
    const void *  pNext                          = {};
    uint32_t      maxPipelineRayPayloadSize      = {};
    uint32_t      maxPipelineRayHitAttributeSize = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkRayTracingPipelineInterfaceCreateInfoKHR>
  {
    using Type = RayTracingPipelineInterfaceCreateInfoKHR;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eRayTracingPipelineInterfaceCreateInfoKHR>
  {
    using Type = RayTracingPipelineInterfaceCreateInfoKHR;
  };

  // wrapper struct for struct VkRayTracingPipelineCreateInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRayTracingPipelineCreateInfoKHR.html
  struct RayTracingPipelineCreateInfoKHR
  {
    using NativeType = VkRayTracingPipelineCreateInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRayTracingPipelineCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( PipelineCreateFlags                              flags_                        = {},
                                                          uint32_t                                         stageCount_                   = {},
                                                          const PipelineShaderStageCreateInfo *            pStages_                      = {},
                                                          uint32_t                                         groupCount_                   = {},
                                                          const RayTracingShaderGroupCreateInfoKHR *       pGroups_                      = {},
                                                          uint32_t                                         maxPipelineRayRecursionDepth_ = {},
                                                          const PipelineLibraryCreateInfoKHR *             pLibraryInfo_                 = {},
                                                          const RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_            = {},
                                                          const PipelineDynamicStateCreateInfo *           pDynamicState_                = {},
                                                          PipelineLayout                                   layout_                       = {},
                                                          Pipeline                                         basePipelineHandle_           = {},
                                                          int32_t                                          basePipelineIndex_            = {},
                                                          const void *                                     pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , stageCount{ stageCount_ }
      , pStages{ pStages_ }
      , groupCount{ groupCount_ }
      , pGroups{ pGroups_ }
      , maxPipelineRayRecursionDepth{ maxPipelineRayRecursionDepth_ }
      , pLibraryInfo{ pLibraryInfo_ }
      , pLibraryInterface{ pLibraryInterface_ }
      , pDynamicState{ pDynamicState_ }
      , layout{ layout_ }
      , basePipelineHandle{ basePipelineHandle_ }
      , basePipelineIndex{ basePipelineIndex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RayTracingPipelineCreateInfoKHR( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : RayTracingPipelineCreateInfoKHR( *reinterpret_cast<RayTracingPipelineCreateInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RayTracingPipelineCreateInfoKHR( PipelineCreateFlags                                                       flags_,
                                     ArrayProxyNoTemporaries<const PipelineShaderStageCreateInfo> const &      stages_,
                                     ArrayProxyNoTemporaries<const RayTracingShaderGroupCreateInfoKHR> const & groups_                       = {},
                                     uint32_t                                                                  maxPipelineRayRecursionDepth_ = {},
                                     const PipelineLibraryCreateInfoKHR *                                      pLibraryInfo_                 = {},
                                     const RayTracingPipelineInterfaceCreateInfoKHR *                          pLibraryInterface_            = {},
                                     const PipelineDynamicStateCreateInfo *                                    pDynamicState_                = {},
                                     PipelineLayout                                                            layout_                       = {},
                                     Pipeline                                                                  basePipelineHandle_           = {},
                                     int32_t                                                                   basePipelineIndex_            = {},
                                     const void *                                                              pNext_                        = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , stageCount( static_cast<uint32_t>( stages_.size() ) )
      , pStages( stages_.data() )
      , groupCount( static_cast<uint32_t>( groups_.size() ) )
      , pGroups( groups_.data() )
      , maxPipelineRayRecursionDepth( maxPipelineRayRecursionDepth_ )
      , pLibraryInfo( pLibraryInfo_ )
      , pLibraryInterface( pLibraryInterface_ )
      , pDynamicState( pDynamicState_ )
      , layout( layout_ )
      , basePipelineHandle( basePipelineHandle_ )
      , basePipelineIndex( basePipelineIndex_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RayTracingPipelineCreateInfoKHR & operator=( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    RayTracingPipelineCreateInfoKHR & operator=( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RayTracingPipelineCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setFlags( PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
    {
      stageCount = stageCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPStages( const PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
    {
      pStages = pStages_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RayTracingPipelineCreateInfoKHR & setStages( ArrayProxyNoTemporaries<const PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
    {
      stageCount = static_cast<uint32_t>( stages_.size() );
      pStages    = stages_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
    {
      groupCount = groupCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPGroups( const RayTracingShaderGroupCreateInfoKHR * pGroups_ ) VULKAN_HPP_NOEXCEPT
    {
      pGroups = pGroups_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RayTracingPipelineCreateInfoKHR & setGroups( ArrayProxyNoTemporaries<const RayTracingShaderGroupCreateInfoKHR> const & groups_ ) VULKAN_HPP_NOEXCEPT
    {
      groupCount = static_cast<uint32_t>( groups_.size() );
      pGroups    = groups_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setMaxPipelineRayRecursionDepth( uint32_t maxPipelineRayRecursionDepth_ ) VULKAN_HPP_NOEXCEPT
    {
      maxPipelineRayRecursionDepth = maxPipelineRayRecursionDepth_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPLibraryInfo( const PipelineLibraryCreateInfoKHR * pLibraryInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pLibraryInfo = pLibraryInfo_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR &
      setPLibraryInterface( const RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ ) VULKAN_HPP_NOEXCEPT
    {
      pLibraryInterface = pLibraryInterface_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPDynamicState( const PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT
    {
      pDynamicState = pDynamicState_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setLayout( PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
    {
      layout = layout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setBasePipelineHandle( Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
    {
      basePipelineHandle = basePipelineHandle_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      basePipelineIndex = basePipelineIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkRayTracingPipelineCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( this );
    }

    operator VkRayTracingPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRayTracingPipelineCreateInfoKHR *>( this );
    }

    operator VkRayTracingPipelineCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( this );
    }

    operator VkRayTracingPipelineCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRayTracingPipelineCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &,
               const void * const &,
               PipelineCreateFlags const &,
               uint32_t const &,
               const PipelineShaderStageCreateInfo * const &,
               uint32_t const &,
               const RayTracingShaderGroupCreateInfoKHR * const &,
               uint32_t const &,
               const PipelineLibraryCreateInfoKHR * const &,
               const RayTracingPipelineInterfaceCreateInfoKHR * const &,
               const PipelineDynamicStateCreateInfo * const &,
               PipelineLayout const &,
               Pipeline const &,
               int32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       flags,
                       stageCount,
                       pStages,
                       groupCount,
                       pGroups,
                       maxPipelineRayRecursionDepth,
                       pLibraryInfo,
                       pLibraryInterface,
                       pDynamicState,
                       layout,
                       basePipelineHandle,
                       basePipelineIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RayTracingPipelineCreateInfoKHR const & ) const = default;
#else
    bool operator==( RayTracingPipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) &&
             ( groupCount == rhs.groupCount ) && ( pGroups == rhs.pGroups ) && ( maxPipelineRayRecursionDepth == rhs.maxPipelineRayRecursionDepth ) &&
             ( pLibraryInfo == rhs.pLibraryInfo ) && ( pLibraryInterface == rhs.pLibraryInterface ) && ( pDynamicState == rhs.pDynamicState ) &&
             ( layout == rhs.layout ) && ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex );
#  endif
    }

    bool operator!=( RayTracingPipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                                    sType                        = StructureType::eRayTracingPipelineCreateInfoKHR;
    const void *                                     pNext                        = {};
    PipelineCreateFlags                              flags                        = {};
    uint32_t                                         stageCount                   = {};
    const PipelineShaderStageCreateInfo *            pStages                      = {};
    uint32_t                                         groupCount                   = {};
    const RayTracingShaderGroupCreateInfoKHR *       pGroups                      = {};
    uint32_t                                         maxPipelineRayRecursionDepth = {};
    const PipelineLibraryCreateInfoKHR *             pLibraryInfo                 = {};
    const RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface            = {};
    const PipelineDynamicStateCreateInfo *           pDynamicState                = {};
    PipelineLayout                                   layout                       = {};
    Pipeline                                         basePipelineHandle           = {};
    int32_t                                          basePipelineIndex            = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkRayTracingPipelineCreateInfoKHR>
  {
    using Type = RayTracingPipelineCreateInfoKHR;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eRayTracingPipelineCreateInfoKHR>
  {
    using Type = RayTracingPipelineCreateInfoKHR;
  };

  // wrapper struct for struct VkRayTracingShaderGroupCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRayTracingShaderGroupCreateInfoNV.html
  struct RayTracingShaderGroupCreateInfoNV
  {
    using NativeType = VkRayTracingShaderGroupCreateInfoNV;

    // Compile-time traits: the VkStructureType tag for this struct, and whether it may
    // appear more than once in a pNext chain (it may not).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRayTracingShaderGroupCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // All shader indices default to ShaderUnusedNV (the "slot not used" sentinel).
    // Note: pNext_ is the last parameter by generator convention, although pNext is the
    // member immediately after sType.
    VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( RayTracingShaderGroupTypeKHR type_               = RayTracingShaderGroupTypeKHR::eGeneral,
                                                            uint32_t                     generalShader_      = ShaderUnusedNV,
                                                            uint32_t                     closestHitShader_   = ShaderUnusedNV,
                                                            uint32_t                     anyHitShader_       = ShaderUnusedNV,
                                                            uint32_t                     intersectionShader_ = ShaderUnusedNV,
                                                            const void *                 pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , type{ type_ }
      , generalShader{ generalShader_ }
      , closestHitShader{ closestHitShader_ }
      , anyHitShader{ anyHitShader_ }
      , intersectionShader{ intersectionShader_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native C struct; relies on this wrapper being layout-identical
    // to VkRayTracingShaderGroupCreateInfoNV (same member order and types, see below).
    RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : RayTracingShaderGroupCreateInfoNV( *reinterpret_cast<RayTracingShaderGroupCreateInfoNV const *>( &rhs ) )
    {
    }

    RayTracingShaderGroupCreateInfoNV & operator=( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (same layout-equivalence reinterpret as above).
    RayTracingShaderGroupCreateInfoNV & operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RayTracingShaderGroupCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setType( RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT
    {
      generalShader = generalShader_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT
    {
      closestHitShader = closestHitShader_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT
    {
      anyHitShader = anyHitShader_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT
    {
      intersectionShader = intersectionShader_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost views of this wrapper as the native C struct, for passing directly to the C API.
    operator VkRayTracingShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRayTracingShaderGroupCreateInfoNV *>( this );
    }

    operator VkRayTracingShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRayTracingShaderGroupCreateInfoNV *>( this );
    }

    operator VkRayTracingShaderGroupCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRayTracingShaderGroupCreateInfoNV *>( this );
    }

    operator VkRayTracingShaderGroupCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRayTracingShaderGroupCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references over all members; used by operator== below when reflection is enabled.
    std::tuple<StructureType const &,
               const void * const &,
               RayTracingShaderGroupTypeKHR const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, type, generalShader, closestHitShader, anyHitShader, intersectionShader );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RayTracingShaderGroupCreateInfoNV const & ) const = default;
#else
    // Memberwise equality (sType and the raw pNext pointer are compared too).
    bool operator==( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( generalShader == rhs.generalShader ) &&
             ( closestHitShader == rhs.closestHitShader ) && ( anyHitShader == rhs.anyHitShader ) && ( intersectionShader == rhs.intersectionShader );
#  endif
    }

    bool operator!=( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkRayTracingShaderGroupCreateInfoNV in order and type — do not reorder,
    // the reinterpret_cast conversions above depend on identical layout.
    StructureType                sType              = StructureType::eRayTracingShaderGroupCreateInfoNV;
    const void *                 pNext              = {};
    RayTracingShaderGroupTypeKHR type               = RayTracingShaderGroupTypeKHR::eGeneral;
    uint32_t                     generalShader      = ShaderUnusedNV;
    uint32_t                     closestHitShader   = ShaderUnusedNV;
    uint32_t                     anyHitShader       = ShaderUnusedNV;
    uint32_t                     intersectionShader = ShaderUnusedNV;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkRayTracingShaderGroupCreateInfoNV>
  {
    using Type = RayTracingShaderGroupCreateInfoNV;
  };
#endif

  // Trait mapping the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eRayTracingShaderGroupCreateInfoNV>
  {
    using Type = RayTracingShaderGroupCreateInfoNV;
  };

  // wrapper struct for struct VkRayTracingPipelineCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRayTracingPipelineCreateInfoNV.html
  struct RayTracingPipelineCreateInfoNV
  {
    using NativeType = VkRayTracingPipelineCreateInfoNV;

    // Compile-time traits: VkStructureType tag, and whether duplicates are allowed in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRayTracingPipelineCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-for-member constructor; pNext_ comes last by generator convention.
    VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( PipelineCreateFlags                       flags_              = {},
                                                         uint32_t                                  stageCount_         = {},
                                                         const PipelineShaderStageCreateInfo *     pStages_            = {},
                                                         uint32_t                                  groupCount_         = {},
                                                         const RayTracingShaderGroupCreateInfoNV * pGroups_            = {},
                                                         uint32_t                                  maxRecursionDepth_  = {},
                                                         PipelineLayout                            layout_             = {},
                                                         Pipeline                                  basePipelineHandle_ = {},
                                                         int32_t                                   basePipelineIndex_  = {},
                                                         const void *                              pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , stageCount{ stageCount_ }
      , pStages{ pStages_ }
      , groupCount{ groupCount_ }
      , pGroups{ pGroups_ }
      , maxRecursionDepth{ maxRecursionDepth_ }
      , layout{ layout_ }
      , basePipelineHandle{ basePipelineHandle_ }
      , basePipelineIndex{ basePipelineIndex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native C struct (relies on identical layout, see members below).
    RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : RayTracingPipelineCreateInfoNV( *reinterpret_cast<RayTracingPipelineCreateInfoNV const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: takes array proxies and fills the count/pointer pairs from them.
    // The proxies are non-owning views — the caller must keep the arrays alive while this
    // struct (and the Vulkan call consuming it) uses them.
    RayTracingPipelineCreateInfoNV( PipelineCreateFlags                                                      flags_,
                                    ArrayProxyNoTemporaries<const PipelineShaderStageCreateInfo> const &     stages_,
                                    ArrayProxyNoTemporaries<const RayTracingShaderGroupCreateInfoNV> const & groups_             = {},
                                    uint32_t                                                                 maxRecursionDepth_  = {},
                                    PipelineLayout                                                           layout_             = {},
                                    Pipeline                                                                 basePipelineHandle_ = {},
                                    int32_t                                                                  basePipelineIndex_  = {},
                                    const void *                                                             pNext_              = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , stageCount( static_cast<uint32_t>( stages_.size() ) )
      , pStages( stages_.data() )
      , groupCount( static_cast<uint32_t>( groups_.size() ) )
      , pGroups( groups_.data() )
      , maxRecursionDepth( maxRecursionDepth_ )
      , layout( layout_ )
      , basePipelineHandle( basePipelineHandle_ )
      , basePipelineIndex( basePipelineIndex_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RayTracingPipelineCreateInfoNV & operator=( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct.
    RayTracingPipelineCreateInfoNV & operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RayTracingPipelineCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setFlags( PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
    {
      stageCount = stageCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setPStages( const PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
    {
      pStages = pStages_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets stageCount and pStages together from one array proxy.
    RayTracingPipelineCreateInfoNV & setStages( ArrayProxyNoTemporaries<const PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
    {
      stageCount = static_cast<uint32_t>( stages_.size() );
      pStages    = stages_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
    {
      groupCount = groupCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setPGroups( const RayTracingShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT
    {
      pGroups = pGroups_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets groupCount and pGroups together from one array proxy.
    RayTracingPipelineCreateInfoNV & setGroups( ArrayProxyNoTemporaries<const RayTracingShaderGroupCreateInfoNV> const & groups_ ) VULKAN_HPP_NOEXCEPT
    {
      groupCount = static_cast<uint32_t>( groups_.size() );
      pGroups    = groups_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT
    {
      maxRecursionDepth = maxRecursionDepth_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setLayout( PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
    {
      layout = layout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setBasePipelineHandle( Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
    {
      basePipelineHandle = basePipelineHandle_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      basePipelineIndex = basePipelineIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost views of this wrapper as the native C struct.
    operator VkRayTracingPipelineCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( this );
    }

    operator VkRayTracingPipelineCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRayTracingPipelineCreateInfoNV *>( this );
    }

    operator VkRayTracingPipelineCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( this );
    }

    operator VkRayTracingPipelineCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRayTracingPipelineCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references over all members; used by operator== below when reflection is enabled.
    std::tuple<StructureType const &,
               const void * const &,
               PipelineCreateFlags const &,
               uint32_t const &,
               const PipelineShaderStageCreateInfo * const &,
               uint32_t const &,
               const RayTracingShaderGroupCreateInfoNV * const &,
               uint32_t const &,
               PipelineLayout const &,
               Pipeline const &,
               int32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, stageCount, pStages, groupCount, pGroups, maxRecursionDepth, layout, basePipelineHandle, basePipelineIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RayTracingPipelineCreateInfoNV const & ) const = default;
#else
    // Memberwise equality; pointer members (pStages/pGroups) compare by address, not contents.
    bool operator==( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) &&
             ( groupCount == rhs.groupCount ) && ( pGroups == rhs.pGroups ) && ( maxRecursionDepth == rhs.maxRecursionDepth ) && ( layout == rhs.layout ) &&
             ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex );
#  endif
    }

    bool operator!=( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkRayTracingPipelineCreateInfoNV in order and type — do not reorder,
    // the reinterpret_cast conversions above depend on identical layout.
    StructureType                             sType              = StructureType::eRayTracingPipelineCreateInfoNV;
    const void *                              pNext              = {};
    PipelineCreateFlags                       flags              = {};
    uint32_t                                  stageCount         = {};
    const PipelineShaderStageCreateInfo *     pStages            = {};
    uint32_t                                  groupCount         = {};
    const RayTracingShaderGroupCreateInfoNV * pGroups            = {};
    uint32_t                                  maxRecursionDepth  = {};
    PipelineLayout                            layout             = {};
    Pipeline                                  basePipelineHandle = {};
    int32_t                                   basePipelineIndex  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkRayTracingPipelineCreateInfoNV>
  {
    using Type = RayTracingPipelineCreateInfoNV;
  };
#endif

  // Trait mapping the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eRayTracingPipelineCreateInfoNV>
  {
    using Type = RayTracingPipelineCreateInfoNV;
  };

  // wrapper struct for struct VkRefreshCycleDurationGOOGLE, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRefreshCycleDurationGOOGLE.html
  struct RefreshCycleDurationGOOGLE
  {
    using NativeType = VkRefreshCycleDurationGOOGLE;

    // No sType/pNext: this struct is not part of a pNext chain (plain data struct,
    // see the VkRefreshCycleDurationGOOGLE man page for the meaning of refreshDuration).
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( uint64_t refreshDuration_ = {} ) VULKAN_HPP_NOEXCEPT : refreshDuration{ refreshDuration_ } {}

    VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native C struct (relies on identical layout).
    RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
      : RefreshCycleDurationGOOGLE( *reinterpret_cast<RefreshCycleDurationGOOGLE const *>( &rhs ) )
    {
    }

    RefreshCycleDurationGOOGLE & operator=( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct.
    RefreshCycleDurationGOOGLE & operator=( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RefreshCycleDurationGOOGLE const *>( &rhs );
      return *this;
    }

    // Zero-cost views of this wrapper as the native C struct.
    operator VkRefreshCycleDurationGOOGLE const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRefreshCycleDurationGOOGLE *>( this );
    }

    operator VkRefreshCycleDurationGOOGLE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( this );
    }

    operator VkRefreshCycleDurationGOOGLE const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRefreshCycleDurationGOOGLE *>( this );
    }

    operator VkRefreshCycleDurationGOOGLE *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references over the single member, for reflection-based comparison.
    std::tuple<uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( refreshDuration );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RefreshCycleDurationGOOGLE const & ) const = default;
#else
    bool operator==( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( refreshDuration == rhs.refreshDuration );
#  endif
    }

    bool operator!=( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint64_t refreshDuration = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct to its C++ wrapper type (C++20 and newer only).
  // No StructureType specialization: this struct has no sType member.
  template <>
  struct CppType<VkRefreshCycleDurationGOOGLE>
  {
    using Type = RefreshCycleDurationGOOGLE;
  };
#endif

  // wrapper struct for struct VkReleaseCapturedPipelineDataInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkReleaseCapturedPipelineDataInfoKHR.html
  struct ReleaseCapturedPipelineDataInfoKHR
  {
    using NativeType = VkReleaseCapturedPipelineDataInfoKHR;

    // Compile-time traits: VkStructureType tag, and whether duplicates are allowed in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eReleaseCapturedPipelineDataInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Note: pNext is a mutable void* here (unlike most info structs, which use const void*).
    VULKAN_HPP_CONSTEXPR ReleaseCapturedPipelineDataInfoKHR( Pipeline pipeline_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pipeline{ pipeline_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ReleaseCapturedPipelineDataInfoKHR( ReleaseCapturedPipelineDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native C struct (relies on identical layout).
    ReleaseCapturedPipelineDataInfoKHR( VkReleaseCapturedPipelineDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ReleaseCapturedPipelineDataInfoKHR( *reinterpret_cast<ReleaseCapturedPipelineDataInfoKHR const *>( &rhs ) )
    {
    }

    ReleaseCapturedPipelineDataInfoKHR & operator=( ReleaseCapturedPipelineDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct.
    ReleaseCapturedPipelineDataInfoKHR & operator=( VkReleaseCapturedPipelineDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ReleaseCapturedPipelineDataInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 ReleaseCapturedPipelineDataInfoKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ReleaseCapturedPipelineDataInfoKHR & setPipeline( Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
    {
      pipeline = pipeline_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost views of this wrapper as the native C struct.
    operator VkReleaseCapturedPipelineDataInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkReleaseCapturedPipelineDataInfoKHR *>( this );
    }

    operator VkReleaseCapturedPipelineDataInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkReleaseCapturedPipelineDataInfoKHR *>( this );
    }

    operator VkReleaseCapturedPipelineDataInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkReleaseCapturedPipelineDataInfoKHR *>( this );
    }

    operator VkReleaseCapturedPipelineDataInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkReleaseCapturedPipelineDataInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references over all members, for reflection-based comparison.
    std::tuple<StructureType const &, void * const &, Pipeline const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pipeline );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ReleaseCapturedPipelineDataInfoKHR const & ) const = default;
#else
    // Memberwise equality (pipeline handles compare by handle value).
    bool operator==( ReleaseCapturedPipelineDataInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline );
#  endif
    }

    bool operator!=( ReleaseCapturedPipelineDataInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkReleaseCapturedPipelineDataInfoKHR in order and type — do not reorder.
    StructureType sType    = StructureType::eReleaseCapturedPipelineDataInfoKHR;
    void *        pNext    = {};
    Pipeline      pipeline = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkReleaseCapturedPipelineDataInfoKHR>
  {
    using Type = ReleaseCapturedPipelineDataInfoKHR;
  };
#endif

  // Trait mapping the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eReleaseCapturedPipelineDataInfoKHR>
  {
    using Type = ReleaseCapturedPipelineDataInfoKHR;
  };

  // wrapper struct for struct VkReleaseSwapchainImagesInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkReleaseSwapchainImagesInfoKHR.html
  struct ReleaseSwapchainImagesInfoKHR
  {
    using NativeType = VkReleaseSwapchainImagesInfoKHR;

    // Compile-time traits: VkStructureType tag, and whether duplicates are allowed in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eReleaseSwapchainImagesInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-for-member constructor; pNext_ comes last by generator convention.
    VULKAN_HPP_CONSTEXPR ReleaseSwapchainImagesInfoKHR( SwapchainKHR     swapchain_       = {},
                                                        uint32_t         imageIndexCount_ = {},
                                                        const uint32_t * pImageIndices_   = {},
                                                        const void *     pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , swapchain{ swapchain_ }
      , imageIndexCount{ imageIndexCount_ }
      , pImageIndices{ pImageIndices_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ReleaseSwapchainImagesInfoKHR( ReleaseSwapchainImagesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native C struct (relies on identical layout).
    ReleaseSwapchainImagesInfoKHR( VkReleaseSwapchainImagesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ReleaseSwapchainImagesInfoKHR( *reinterpret_cast<ReleaseSwapchainImagesInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: fills imageIndexCount/pImageIndices from one array proxy.
    // The proxy is a non-owning view — the caller keeps the index array alive.
    ReleaseSwapchainImagesInfoKHR( SwapchainKHR swapchain_, ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), swapchain( swapchain_ ), imageIndexCount( static_cast<uint32_t>( imageIndices_.size() ) ), pImageIndices( imageIndices_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    ReleaseSwapchainImagesInfoKHR & operator=( ReleaseSwapchainImagesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct.
    ReleaseSwapchainImagesInfoKHR & operator=( VkReleaseSwapchainImagesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ReleaseSwapchainImagesInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 ReleaseSwapchainImagesInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ReleaseSwapchainImagesInfoKHR & setSwapchain( SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchain = swapchain_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ReleaseSwapchainImagesInfoKHR & setImageIndexCount( uint32_t imageIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      imageIndexCount = imageIndexCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ReleaseSwapchainImagesInfoKHR & setPImageIndices( const uint32_t * pImageIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pImageIndices = pImageIndices_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets imageIndexCount and pImageIndices together from one array proxy.
    ReleaseSwapchainImagesInfoKHR & setImageIndices( ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      imageIndexCount = static_cast<uint32_t>( imageIndices_.size() );
      pImageIndices   = imageIndices_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost views of this wrapper as the native C struct.
    operator VkReleaseSwapchainImagesInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkReleaseSwapchainImagesInfoKHR *>( this );
    }

    operator VkReleaseSwapchainImagesInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkReleaseSwapchainImagesInfoKHR *>( this );
    }

    operator VkReleaseSwapchainImagesInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkReleaseSwapchainImagesInfoKHR *>( this );
    }

    operator VkReleaseSwapchainImagesInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkReleaseSwapchainImagesInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references over all members, for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, SwapchainKHR const &, uint32_t const &, const uint32_t * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, swapchain, imageIndexCount, pImageIndices );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ReleaseSwapchainImagesInfoKHR const & ) const = default;
#else
    // Memberwise equality; pImageIndices compares by address, not by pointed-to contents.
    bool operator==( ReleaseSwapchainImagesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && ( imageIndexCount == rhs.imageIndexCount ) &&
             ( pImageIndices == rhs.pImageIndices );
#  endif
    }

    bool operator!=( ReleaseSwapchainImagesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkReleaseSwapchainImagesInfoKHR in order and type — do not reorder.
    StructureType    sType           = StructureType::eReleaseSwapchainImagesInfoKHR;
    const void *     pNext           = {};
    SwapchainKHR     swapchain       = {};
    uint32_t         imageIndexCount = {};
    const uint32_t * pImageIndices   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkReleaseSwapchainImagesInfoKHR>
  {
    using Type = ReleaseSwapchainImagesInfoKHR;
  };
#endif

  // Trait mapping the StructureType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eReleaseSwapchainImagesInfoKHR>
  {
    using Type = ReleaseSwapchainImagesInfoKHR;
  };

  // Alias: the EXT-suffixed name refers to the same struct as the KHR one.
  using ReleaseSwapchainImagesInfoEXT = ReleaseSwapchainImagesInfoKHR;

  // wrapper struct for struct VkRenderPassAttachmentBeginInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassAttachmentBeginInfo.html
  struct RenderPassAttachmentBeginInfo
  {
    using NativeType = VkRenderPassAttachmentBeginInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderPassAttachmentBeginInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      RenderPassAttachmentBeginInfo( uint32_t attachmentCount_ = {}, const ImageView * pAttachments_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , attachmentCount{ attachmentCount_ }
      , pAttachments{ pAttachments_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RenderPassAttachmentBeginInfo( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassAttachmentBeginInfo( *reinterpret_cast<RenderPassAttachmentBeginInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassAttachmentBeginInfo( ArrayProxyNoTemporaries<const ImageView> const & attachments_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RenderPassAttachmentBeginInfo & operator=( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    RenderPassAttachmentBeginInfo & operator=( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderPassAttachmentBeginInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = attachmentCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setPAttachments( const ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pAttachments = pAttachments_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassAttachmentBeginInfo & setAttachments( ArrayProxyNoTemporaries<const ImageView> const & attachments_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = static_cast<uint32_t>( attachments_.size() );
      pAttachments    = attachments_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    operator VkRenderPassAttachmentBeginInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassAttachmentBeginInfo *>( this );
    }

    operator VkRenderPassAttachmentBeginInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassAttachmentBeginInfo *>( this );
    }

    operator VkRenderPassAttachmentBeginInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderPassAttachmentBeginInfo *>( this );
    }

    operator VkRenderPassAttachmentBeginInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderPassAttachmentBeginInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const ImageView * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, attachmentCount, pAttachments );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderPassAttachmentBeginInfo const & ) const = default;
#else
    // Member-wise equality; note that pointer members (pNext, pAttachments) are compared by
    // address only, not by the contents they point to.
    bool operator==( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments );
#  endif
    }

    bool operator!=( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member types and order must exactly mirror VkRenderPassAttachmentBeginInfo; the
    // reinterpret_cast-based conversion operators rely on this layout compatibility.
    StructureType     sType           = StructureType::eRenderPassAttachmentBeginInfo;
    const void *      pNext           = {};
    uint32_t          attachmentCount = {};
    const ImageView * pAttachments    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API type to its C++ wrapper for generic code (only provided for C++20 and up).
  template <>
  struct CppType<VkRenderPassAttachmentBeginInfo>
  {
    using Type = RenderPassAttachmentBeginInfo;
  };
#endif

  // Maps the sType enumerant back to the wrapper type (used e.g. by structure-chain machinery).
  template <>
  struct CppType<StructureType, StructureType::eRenderPassAttachmentBeginInfo>
  {
    using Type = RenderPassAttachmentBeginInfo;
  };

  // The structure was promoted from VK_KHR_imageless_framebuffer; keep the KHR name as an alias.
  using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo;

  // wrapper struct for struct VkRenderPassBeginInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassBeginInfo.html
  struct RenderPassBeginInfo
  {
    using NativeType = VkRenderPassBeginInfo;

    // allowDuplicate/structureType drive the generic structure-chain validation machinery.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderPassBeginInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all array/pointer members are borrowed, never owned.
    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( RenderPass         renderPass_      = {},
                                                 Framebuffer        framebuffer_     = {},
                                                 Rect2D             renderArea_      = {},
                                                 uint32_t           clearValueCount_ = {},
                                                 const ClearValue * pClearValues_    = {},
                                                 const void *       pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , renderPass{ renderPass_ }
      , framebuffer{ framebuffer_ }
      , renderArea{ renderArea_ }
      , clearValueCount{ clearValueCount_ }
      , pClearValues{ pClearValues_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because the two types are layout-compatible.
    RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT : RenderPassBeginInfo( *reinterpret_cast<RenderPassBeginInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced constructor: derives clearValueCount/pClearValues from a single array proxy.
    // The proxy is non-owning; the clear-value array must outlive any use of this struct.
    RenderPassBeginInfo( RenderPass                                        renderPass_,
                         Framebuffer                                       framebuffer_,
                         Rect2D                                            renderArea_,
                         ArrayProxyNoTemporaries<const ClearValue> const & clearValues_,
                         const void *                                      pNext_ = nullptr )
      : pNext( pNext_ )
      , renderPass( renderPass_ )
      , framebuffer( framebuffer_ )
      , renderArea( renderArea_ )
      , clearValueCount( static_cast<uint32_t>( clearValues_.size() ) )
      , pClearValues( clearValues_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RenderPassBeginInfo & operator=( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the layout-compatible C++ wrapper view.
    RenderPassBeginInfo & operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderPassBeginInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent member setters; each returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setRenderPass( RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
    {
      renderPass = renderPass_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setFramebuffer( Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      framebuffer = framebuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setRenderArea( Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
    {
      renderArea = renderArea_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT
    {
      clearValueCount = clearValueCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setPClearValues( const ClearValue * pClearValues_ ) VULKAN_HPP_NOEXCEPT
    {
      pClearValues = pClearValues_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience setter: fills clearValueCount and pClearValues together from one array proxy.
    RenderPassBeginInfo & setClearValues( ArrayProxyNoTemporaries<const ClearValue> const & clearValues_ ) VULKAN_HPP_NOEXCEPT
    {
      clearValueCount = static_cast<uint32_t>( clearValues_.size() );
      pClearValues    = clearValues_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C API type, relying on layout compatibility.
    operator VkRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassBeginInfo *>( this );
    }

    operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassBeginInfo *>( this );
    }

    operator VkRenderPassBeginInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderPassBeginInfo *>( this );
    }

    operator VkRenderPassBeginInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderPassBeginInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic member-wise operations.
    std::
      tuple<StructureType const &, const void * const &, RenderPass const &, Framebuffer const &, Rect2D const &, uint32_t const &, const ClearValue * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, renderPass, framebuffer, renderArea, clearValueCount, pClearValues );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderPassBeginInfo const & ) const = default;
#else
    // Member-wise equality; pointer members are compared by address only.
    bool operator==( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && ( framebuffer == rhs.framebuffer ) &&
             ( renderArea == rhs.renderArea ) && ( clearValueCount == rhs.clearValueCount ) && ( pClearValues == rhs.pClearValues );
#  endif
    }

    bool operator!=( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member types and order must exactly mirror VkRenderPassBeginInfo (layout compatibility).
    StructureType      sType           = StructureType::eRenderPassBeginInfo;
    const void *       pNext           = {};
    RenderPass         renderPass      = {};
    Framebuffer        framebuffer     = {};
    Rect2D             renderArea      = {};
    uint32_t           clearValueCount = {};
    const ClearValue * pClearValues    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API type to its C++ wrapper for generic code (only provided for C++20 and up).
  template <>
  struct CppType<VkRenderPassBeginInfo>
  {
    using Type = RenderPassBeginInfo;
  };
#endif

  // Maps the sType enumerant back to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eRenderPassBeginInfo>
  {
    using Type = RenderPassBeginInfo;
  };

  // wrapper struct for struct VkSubpassDescription, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassDescription.html
  struct SubpassDescription
  {
    using NativeType = VkSubpassDescription;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all attachment arrays are borrowed, never owned.
    VULKAN_HPP_CONSTEXPR SubpassDescription( SubpassDescriptionFlags     flags_                   = {},
                                             PipelineBindPoint           pipelineBindPoint_       = PipelineBindPoint::eGraphics,
                                             uint32_t                    inputAttachmentCount_    = {},
                                             const AttachmentReference * pInputAttachments_       = {},
                                             uint32_t                    colorAttachmentCount_    = {},
                                             const AttachmentReference * pColorAttachments_       = {},
                                             const AttachmentReference * pResolveAttachments_     = {},
                                             const AttachmentReference * pDepthStencilAttachment_ = {},
                                             uint32_t                    preserveAttachmentCount_ = {},
                                             const uint32_t *            pPreserveAttachments_    = {} ) VULKAN_HPP_NOEXCEPT
      : flags{ flags_ }
      , pipelineBindPoint{ pipelineBindPoint_ }
      , inputAttachmentCount{ inputAttachmentCount_ }
      , pInputAttachments{ pInputAttachments_ }
      , colorAttachmentCount{ colorAttachmentCount_ }
      , pColorAttachments{ pColorAttachments_ }
      , pResolveAttachments{ pResolveAttachments_ }
      , pDepthStencilAttachment{ pDepthStencilAttachment_ }
      , preserveAttachmentCount{ preserveAttachmentCount_ }
      , pPreserveAttachments{ pPreserveAttachments_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SubpassDescription( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because the two types are layout-compatible.
    SubpassDescription( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDescription( *reinterpret_cast<SubpassDescription const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced constructor: derives each count/pointer pair from an array proxy. Note there is
    // no separate resolve count: per the Vulkan spec, pResolveAttachments is either NULL or an
    // array of colorAttachmentCount elements, which is what the check below enforces.
    SubpassDescription( SubpassDescriptionFlags                                    flags_,
                        PipelineBindPoint                                          pipelineBindPoint_,
                        ArrayProxyNoTemporaries<const AttachmentReference> const & inputAttachments_,
                        ArrayProxyNoTemporaries<const AttachmentReference> const & colorAttachments_        = {},
                        ArrayProxyNoTemporaries<const AttachmentReference> const & resolveAttachments_      = {},
                        const AttachmentReference *                                pDepthStencilAttachment_ = {},
                        ArrayProxyNoTemporaries<const uint32_t> const &            preserveAttachments_     = {} )
      : flags( flags_ )
      , pipelineBindPoint( pipelineBindPoint_ )
      , inputAttachmentCount( static_cast<uint32_t>( inputAttachments_.size() ) )
      , pInputAttachments( inputAttachments_.data() )
      , colorAttachmentCount( static_cast<uint32_t>( colorAttachments_.size() ) )
      , pColorAttachments( colorAttachments_.data() )
      , pResolveAttachments( resolveAttachments_.data() )
      , pDepthStencilAttachment( pDepthStencilAttachment_ )
      , preserveAttachmentCount( static_cast<uint32_t>( preserveAttachments_.size() ) )
      , pPreserveAttachments( preserveAttachments_.data() )
    {
      // Validate the resolve/color size relationship; assert or throw depending on configuration.
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) );
#    else
      if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) )
      {
        throw LogicError(
          VULKAN_HPP_NAMESPACE_STRING
          "::SubpassDescription::SubpassDescription: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SubpassDescription & operator=( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the layout-compatible C++ wrapper view.
    SubpassDescription & operator=( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SubpassDescription const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent member setters; each returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setFlags( SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineBindPoint = pipelineBindPoint_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      inputAttachmentCount = inputAttachmentCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPInputAttachments( const AttachmentReference * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pInputAttachments = pInputAttachments_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SubpassDescription & setInputAttachments( ArrayProxyNoTemporaries<const AttachmentReference> const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      inputAttachmentCount = static_cast<uint32_t>( inputAttachments_.size() );
      pInputAttachments    = inputAttachments_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount = colorAttachmentCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPColorAttachments( const AttachmentReference * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pColorAttachments = pColorAttachments_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SubpassDescription & setColorAttachments( ArrayProxyNoTemporaries<const AttachmentReference> const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
      pColorAttachments    = colorAttachments_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPResolveAttachments( const AttachmentReference * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pResolveAttachments = pResolveAttachments_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // NOTE: this intentionally writes colorAttachmentCount — VkSubpassDescription has no
    // separate resolve count; pResolveAttachments must be NULL or hold colorAttachmentCount
    // elements. Call this with an array matching the color attachments (or not at all).
    SubpassDescription & setResolveAttachments( ArrayProxyNoTemporaries<const AttachmentReference> const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount = static_cast<uint32_t>( resolveAttachments_.size() );
      pResolveAttachments  = resolveAttachments_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPDepthStencilAttachment( const AttachmentReference * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
    {
      pDepthStencilAttachment = pDepthStencilAttachment_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      preserveAttachmentCount = preserveAttachmentCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pPreserveAttachments = pPreserveAttachments_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SubpassDescription & setPreserveAttachments( ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      preserveAttachmentCount = static_cast<uint32_t>( preserveAttachments_.size() );
      pPreserveAttachments    = preserveAttachments_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C API type, relying on layout compatibility.
    operator VkSubpassDescription const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSubpassDescription *>( this );
    }

    operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubpassDescription *>( this );
    }

    operator VkSubpassDescription const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSubpassDescription *>( this );
    }

    operator VkSubpassDescription *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSubpassDescription *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic member-wise operations.
    std::tuple<SubpassDescriptionFlags const &,
               PipelineBindPoint const &,
               uint32_t const &,
               const AttachmentReference * const &,
               uint32_t const &,
               const AttachmentReference * const &,
               const AttachmentReference * const &,
               const AttachmentReference * const &,
               uint32_t const &,
               const uint32_t * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( flags,
                       pipelineBindPoint,
                       inputAttachmentCount,
                       pInputAttachments,
                       colorAttachmentCount,
                       pColorAttachments,
                       pResolveAttachments,
                       pDepthStencilAttachment,
                       preserveAttachmentCount,
                       pPreserveAttachments );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubpassDescription const & ) const = default;
#else
    // Member-wise equality; pointer members are compared by address only.
    bool operator==( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( inputAttachmentCount == rhs.inputAttachmentCount ) &&
             ( pInputAttachments == rhs.pInputAttachments ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) &&
             ( pColorAttachments == rhs.pColorAttachments ) && ( pResolveAttachments == rhs.pResolveAttachments ) &&
             ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) && ( preserveAttachmentCount == rhs.preserveAttachmentCount ) &&
             ( pPreserveAttachments == rhs.pPreserveAttachments );
#  endif
    }

    bool operator!=( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member types and order must exactly mirror VkSubpassDescription (layout compatibility).
    SubpassDescriptionFlags     flags                   = {};
    PipelineBindPoint           pipelineBindPoint       = PipelineBindPoint::eGraphics;
    uint32_t                    inputAttachmentCount    = {};
    const AttachmentReference * pInputAttachments       = {};
    uint32_t                    colorAttachmentCount    = {};
    const AttachmentReference * pColorAttachments       = {};
    const AttachmentReference * pResolveAttachments     = {};
    const AttachmentReference * pDepthStencilAttachment = {};
    uint32_t                    preserveAttachmentCount = {};
    const uint32_t *            pPreserveAttachments    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API type to its C++ wrapper for generic code (only provided for C++20 and up).
  template <>
  struct CppType<VkSubpassDescription>
  {
    using Type = SubpassDescription;
  };
#endif

  // wrapper struct for struct VkSubpassDependency, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassDependency.html
  struct SubpassDependency
  {
    using NativeType = VkSubpassDependency;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all members are plain values, so this struct owns everything it holds.
    VULKAN_HPP_CONSTEXPR SubpassDependency( uint32_t           srcSubpass_      = {},
                                            uint32_t           dstSubpass_      = {},
                                            PipelineStageFlags srcStageMask_    = {},
                                            PipelineStageFlags dstStageMask_    = {},
                                            AccessFlags        srcAccessMask_   = {},
                                            AccessFlags        dstAccessMask_   = {},
                                            DependencyFlags    dependencyFlags_ = {} ) VULKAN_HPP_NOEXCEPT
      : srcSubpass{ srcSubpass_ }
      , dstSubpass{ dstSubpass_ }
      , srcStageMask{ srcStageMask_ }
      , dstStageMask{ dstStageMask_ }
      , srcAccessMask{ srcAccessMask_ }
      , dstAccessMask{ dstAccessMask_ }
      , dependencyFlags{ dependencyFlags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SubpassDependency( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because the two types are layout-compatible.
    SubpassDependency( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDependency( *reinterpret_cast<SubpassDependency const *>( &rhs ) ) {}

    SubpassDependency & operator=( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the layout-compatible C++ wrapper view.
    SubpassDependency & operator=( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SubpassDependency const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent member setters; each returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
    {
      srcSubpass = srcSubpass_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
    {
      dstSubpass = dstSubpass_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcStageMask( PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcStageMask = srcStageMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstStageMask( PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstStageMask = dstStageMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcAccessMask( AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAccessMask = srcAccessMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstAccessMask( AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAccessMask = dstAccessMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDependencyFlags( DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      dependencyFlags = dependencyFlags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C API type, relying on layout compatibility.
    operator VkSubpassDependency const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSubpassDependency *>( this );
    }

    operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubpassDependency *>( this );
    }

    operator VkSubpassDependency const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSubpassDependency *>( this );
    }

    operator VkSubpassDependency *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSubpassDependency *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic member-wise operations.
    std::tuple<uint32_t const &,
               uint32_t const &,
               PipelineStageFlags const &,
               PipelineStageFlags const &,
               AccessFlags const &,
               AccessFlags const &,
               DependencyFlags const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( srcSubpass, dstSubpass, srcStageMask, dstStageMask, srcAccessMask, dstAccessMask, dependencyFlags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubpassDependency const & ) const = default;
#else
    // Member-wise equality of all (value-type) members.
    bool operator==( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( srcSubpass == rhs.srcSubpass ) && ( dstSubpass == rhs.dstSubpass ) && ( srcStageMask == rhs.srcStageMask ) &&
             ( dstStageMask == rhs.dstStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) &&
             ( dependencyFlags == rhs.dependencyFlags );
#  endif
    }

    bool operator!=( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member types and order must exactly mirror VkSubpassDependency (layout compatibility).
    uint32_t           srcSubpass      = {};
    uint32_t           dstSubpass      = {};
    PipelineStageFlags srcStageMask    = {};
    PipelineStageFlags dstStageMask    = {};
    AccessFlags        srcAccessMask   = {};
    AccessFlags        dstAccessMask   = {};
    DependencyFlags    dependencyFlags = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API type to its C++ wrapper for generic code (only provided for C++20 and up).
  template <>
  struct CppType<VkSubpassDependency>
  {
    using Type = SubpassDependency;
  };
#endif

  // wrapper struct for struct VkRenderPassCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassCreateInfo.html
  struct RenderPassCreateInfo
  {
    using NativeType = VkRenderPassCreateInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderPassCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( RenderPassCreateFlags         flags_           = {},
                                               uint32_t                      attachmentCount_ = {},
                                               const AttachmentDescription * pAttachments_    = {},
                                               uint32_t                      subpassCount_    = {},
                                               const SubpassDescription *    pSubpasses_      = {},
                                               uint32_t                      dependencyCount_ = {},
                                               const SubpassDependency *     pDependencies_   = {},
                                               const void *                  pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , attachmentCount{ attachmentCount_ }
      , pAttachments{ pAttachments_ }
      , subpassCount{ subpassCount_ }
      , pSubpasses{ pSubpasses_ }
      , dependencyCount{ dependencyCount_ }
      , pDependencies{ pDependencies_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassCreateInfo( *reinterpret_cast<RenderPassCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassCreateInfo( RenderPassCreateFlags                                        flags_,
                          ArrayProxyNoTemporaries<const AttachmentDescription> const & attachments_,
                          ArrayProxyNoTemporaries<const SubpassDescription> const &    subpasses_    = {},
                          ArrayProxyNoTemporaries<const SubpassDependency> const &     dependencies_ = {},
                          const void *                                                 pNext_        = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , attachmentCount( static_cast<uint32_t>( attachments_.size() ) )
      , pAttachments( attachments_.data() )
      , subpassCount( static_cast<uint32_t>( subpasses_.size() ) )
      , pSubpasses( subpasses_.data() )
      , dependencyCount( static_cast<uint32_t>( dependencies_.size() ) )
      , pDependencies( dependencies_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RenderPassCreateInfo & operator=( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    RenderPassCreateInfo & operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderPassCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setFlags( RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = attachmentCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPAttachments( const AttachmentDescription * pAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pAttachments = pAttachments_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassCreateInfo & setAttachments( ArrayProxyNoTemporaries<const AttachmentDescription> const & attachments_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = static_cast<uint32_t>( attachments_.size() );
      pAttachments    = attachments_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
    {
      subpassCount = subpassCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPSubpasses( const SubpassDescription * pSubpasses_ ) VULKAN_HPP_NOEXCEPT
    {
      pSubpasses = pSubpasses_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassCreateInfo & setSubpasses( ArrayProxyNoTemporaries<const SubpassDescription> const & subpasses_ ) VULKAN_HPP_NOEXCEPT
    {
      subpassCount = static_cast<uint32_t>( subpasses_.size() );
      pSubpasses   = subpasses_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
    {
      dependencyCount = dependencyCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPDependencies( const SubpassDependency * pDependencies_ ) VULKAN_HPP_NOEXCEPT
    {
      pDependencies = pDependencies_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassCreateInfo & setDependencies( ArrayProxyNoTemporaries<const SubpassDependency> const & dependencies_ ) VULKAN_HPP_NOEXCEPT
    {
      dependencyCount = static_cast<uint32_t>( dependencies_.size() );
      pDependencies   = dependencies_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    operator VkRenderPassCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassCreateInfo *>( this );
    }

    operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassCreateInfo *>( this );
    }

    operator VkRenderPassCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderPassCreateInfo *>( this );
    }

    operator VkRenderPassCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderPassCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to every member, in declaration order.
    // Used by the reflection-based operator== below and by generic tooling.
    std::tuple<StructureType const &,
               const void * const &,
               RenderPassCreateFlags const &,
               uint32_t const &,
               const AttachmentDescription * const &,
               uint32_t const &,
               const SubpassDescription * const &,
               uint32_t const &,
               const SubpassDependency * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, attachmentCount, pAttachments, subpassCount, pSubpasses, dependencyCount, pDependencies );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20 path: defaulted three-way comparison generates ==, !=, <, etc.
    auto operator<=>( RenderPassCreateInfo const & ) const = default;
#else
    // Pre-C++20 path: member-wise equality. Note that pointer members are compared
    // by address, not by the contents of the arrays they point to.
    bool operator==( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( attachmentCount == rhs.attachmentCount ) &&
             ( pAttachments == rhs.pAttachments ) && ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) &&
             ( dependencyCount == rhs.dependencyCount ) && ( pDependencies == rhs.pDependencies );
#  endif
    }

    bool operator!=( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member layout mirrors VkRenderPassCreateInfo exactly (order and types must not
    // change — the conversion operators above rely on it). sType is pre-initialized
    // to the correct structure type; everything else is zero-initialized.
    StructureType                 sType           = StructureType::eRenderPassCreateInfo;
    const void *                  pNext           = {};
    RenderPassCreateFlags         flags           = {};
    uint32_t                      attachmentCount = {};
    const AttachmentDescription * pAttachments    = {};
    uint32_t                      subpassCount    = {};
    const SubpassDescription *    pSubpasses      = {};
    uint32_t                      dependencyCount = {};
    const SubpassDependency *     pDependencies   = {};
  };

// Trait mappings so generic code can go from the native C type (C++20 only) or the
// StructureType enumerant to the corresponding C++ wrapper type.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkRenderPassCreateInfo>
  {
    using Type = RenderPassCreateInfo;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eRenderPassCreateInfo>
  {
    using Type = RenderPassCreateInfo;
  };

  // wrapper struct for struct VkSubpassDescription2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassDescription2.html
  // Layout-compatible C++ wrapper: member order/types mirror the C struct exactly so
  // the conversion operators below can reinterpret_cast between the two.
  struct SubpassDescription2
  {
    using NativeType = VkSubpassDescription2;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSubpassDescription2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all counts/pointers default to zero/null, so a
    // default-constructed instance is a valid "empty" description.
    VULKAN_HPP_CONSTEXPR SubpassDescription2( SubpassDescriptionFlags      flags_                   = {},
                                              PipelineBindPoint            pipelineBindPoint_       = PipelineBindPoint::eGraphics,
                                              uint32_t                     viewMask_                = {},
                                              uint32_t                     inputAttachmentCount_    = {},
                                              const AttachmentReference2 * pInputAttachments_       = {},
                                              uint32_t                     colorAttachmentCount_    = {},
                                              const AttachmentReference2 * pColorAttachments_       = {},
                                              const AttachmentReference2 * pResolveAttachments_     = {},
                                              const AttachmentReference2 * pDepthStencilAttachment_ = {},
                                              uint32_t                     preserveAttachmentCount_ = {},
                                              const uint32_t *             pPreserveAttachments_    = {},
                                              const void *                 pNext_                   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , pipelineBindPoint{ pipelineBindPoint_ }
      , viewMask{ viewMask_ }
      , inputAttachmentCount{ inputAttachmentCount_ }
      , pInputAttachments{ pInputAttachments_ }
      , colorAttachmentCount{ colorAttachmentCount_ }
      , pColorAttachments{ pColorAttachments_ }
      , pResolveAttachments{ pResolveAttachments_ }
      , pDepthStencilAttachment{ pDepthStencilAttachment_ }
      , preserveAttachmentCount{ preserveAttachmentCount_ }
      , pPreserveAttachments{ pPreserveAttachments_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SubpassDescription2( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct — valid because of layout compatibility.
    SubpassDescription2( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDescription2( *reinterpret_cast<SubpassDescription2 const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives each count from its array proxy.
    // colorAttachmentCount is taken from colorAttachments_; resolveAttachments_, if
    // non-empty, must have the same size (there is no separate resolve count member)
    // — enforced by the assert/throw below.
    SubpassDescription2( SubpassDescriptionFlags                                     flags_,
                         PipelineBindPoint                                           pipelineBindPoint_,
                         uint32_t                                                    viewMask_,
                         ArrayProxyNoTemporaries<const AttachmentReference2> const & inputAttachments_,
                         ArrayProxyNoTemporaries<const AttachmentReference2> const & colorAttachments_        = {},
                         ArrayProxyNoTemporaries<const AttachmentReference2> const & resolveAttachments_      = {},
                         const AttachmentReference2 *                                pDepthStencilAttachment_ = {},
                         ArrayProxyNoTemporaries<const uint32_t> const &             preserveAttachments_     = {},
                         const void *                                                pNext_                   = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , pipelineBindPoint( pipelineBindPoint_ )
      , viewMask( viewMask_ )
      , inputAttachmentCount( static_cast<uint32_t>( inputAttachments_.size() ) )
      , pInputAttachments( inputAttachments_.data() )
      , colorAttachmentCount( static_cast<uint32_t>( colorAttachments_.size() ) )
      , pColorAttachments( colorAttachments_.data() )
      , pResolveAttachments( resolveAttachments_.data() )
      , pDepthStencilAttachment( pDepthStencilAttachment_ )
      , preserveAttachmentCount( static_cast<uint32_t>( preserveAttachments_.size() ) )
      , pPreserveAttachments( preserveAttachments_.data() )
    {
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) );
#    else
      if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) )
      {
        throw LogicError(
          VULKAN_HPP_NAMESPACE_STRING
          "::SubpassDescription2::SubpassDescription2: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SubpassDescription2 & operator=( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible reinterpretation.
    SubpassDescription2 & operator=( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SubpassDescription2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable member-wise setters; the ArrayProxy variants (enhanced mode) set the
    // count and pointer of a pair together.
    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setFlags( SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineBindPoint = pipelineBindPoint_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
    {
      viewMask = viewMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      inputAttachmentCount = inputAttachmentCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPInputAttachments( const AttachmentReference2 * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pInputAttachments = pInputAttachments_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SubpassDescription2 & setInputAttachments( ArrayProxyNoTemporaries<const AttachmentReference2> const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      inputAttachmentCount = static_cast<uint32_t>( inputAttachments_.size() );
      pInputAttachments    = inputAttachments_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount = colorAttachmentCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPColorAttachments( const AttachmentReference2 * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pColorAttachments = pColorAttachments_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SubpassDescription2 & setColorAttachments( ArrayProxyNoTemporaries<const AttachmentReference2> const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
      pColorAttachments    = colorAttachments_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPResolveAttachments( const AttachmentReference2 * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pResolveAttachments = pResolveAttachments_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // NOTE: this intentionally writes colorAttachmentCount — the struct has no
    // separate resolve count; the resolve array's length is colorAttachmentCount
    // (see the member list below and the size check in the enhanced constructor).
    SubpassDescription2 & setResolveAttachments( ArrayProxyNoTemporaries<const AttachmentReference2> const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount = static_cast<uint32_t>( resolveAttachments_.size() );
      pResolveAttachments  = resolveAttachments_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPDepthStencilAttachment( const AttachmentReference2 * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
    {
      pDepthStencilAttachment = pDepthStencilAttachment_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      preserveAttachmentCount = preserveAttachmentCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pPreserveAttachments = pPreserveAttachments_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SubpassDescription2 & setPreserveAttachments( ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      preserveAttachmentCount = static_cast<uint32_t>( preserveAttachments_.size() );
      pPreserveAttachments    = preserveAttachments_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (reference and pointer forms),
    // relying on the layout compatibility noted at the top of the struct.
    operator VkSubpassDescription2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSubpassDescription2 *>( this );
    }

    operator VkSubpassDescription2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubpassDescription2 *>( this );
    }

    operator VkSubpassDescription2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSubpassDescription2 *>( this );
    }

    operator VkSubpassDescription2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSubpassDescription2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               SubpassDescriptionFlags const &,
               PipelineBindPoint const &,
               uint32_t const &,
               uint32_t const &,
               const AttachmentReference2 * const &,
               uint32_t const &,
               const AttachmentReference2 * const &,
               const AttachmentReference2 * const &,
               const AttachmentReference2 * const &,
               uint32_t const &,
               const uint32_t * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       flags,
                       pipelineBindPoint,
                       viewMask,
                       inputAttachmentCount,
                       pInputAttachments,
                       colorAttachmentCount,
                       pColorAttachments,
                       pResolveAttachments,
                       pDepthStencilAttachment,
                       preserveAttachmentCount,
                       pPreserveAttachments );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubpassDescription2 const & ) const = default;
#else
    // Member-wise equality; pointer members compare by address, not pointee contents.
    bool operator==( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) &&
             ( viewMask == rhs.viewMask ) && ( inputAttachmentCount == rhs.inputAttachmentCount ) && ( pInputAttachments == rhs.pInputAttachments ) &&
             ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachments == rhs.pColorAttachments ) &&
             ( pResolveAttachments == rhs.pResolveAttachments ) && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) &&
             ( preserveAttachmentCount == rhs.preserveAttachmentCount ) && ( pPreserveAttachments == rhs.pPreserveAttachments );
#  endif
    }

    bool operator!=( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Mirrors VkSubpassDescription2 member-for-member; do not reorder.
    // Note there is no resolveAttachmentCount: pResolveAttachments (when non-null)
    // has colorAttachmentCount elements.
    StructureType                sType                   = StructureType::eSubpassDescription2;
    const void *                 pNext                   = {};
    SubpassDescriptionFlags      flags                   = {};
    PipelineBindPoint            pipelineBindPoint       = PipelineBindPoint::eGraphics;
    uint32_t                     viewMask                = {};
    uint32_t                     inputAttachmentCount    = {};
    const AttachmentReference2 * pInputAttachments       = {};
    uint32_t                     colorAttachmentCount    = {};
    const AttachmentReference2 * pColorAttachments       = {};
    const AttachmentReference2 * pResolveAttachments     = {};
    const AttachmentReference2 * pDepthStencilAttachment = {};
    uint32_t                     preserveAttachmentCount = {};
    const uint32_t *             pPreserveAttachments    = {};
  };

// Trait mappings from the native C type (C++20 only) and the StructureType
// enumerant to the C++ wrapper type.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkSubpassDescription2>
  {
    using Type = SubpassDescription2;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eSubpassDescription2>
  {
    using Type = SubpassDescription2;
  };

  // Alias for the VK_KHR_create_renderpass2 extension spelling of this struct.
  using SubpassDescription2KHR = SubpassDescription2;

  // wrapper struct for struct VkSubpassDependency2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassDependency2.html
  // Layout-compatible C++ wrapper: member order/types mirror the C struct exactly so
  // the conversion operators below can reinterpret_cast between the two.
  struct SubpassDependency2
  {
    using NativeType = VkSubpassDependency2;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSubpassDependency2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all members default to zero, pNext to nullptr.
    VULKAN_HPP_CONSTEXPR SubpassDependency2( uint32_t           srcSubpass_      = {},
                                             uint32_t           dstSubpass_      = {},
                                             PipelineStageFlags srcStageMask_    = {},
                                             PipelineStageFlags dstStageMask_    = {},
                                             AccessFlags        srcAccessMask_   = {},
                                             AccessFlags        dstAccessMask_   = {},
                                             DependencyFlags    dependencyFlags_ = {},
                                             int32_t            viewOffset_      = {},
                                             const void *       pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcSubpass{ srcSubpass_ }
      , dstSubpass{ dstSubpass_ }
      , srcStageMask{ srcStageMask_ }
      , dstStageMask{ dstStageMask_ }
      , srcAccessMask{ srcAccessMask_ }
      , dstAccessMask{ dstAccessMask_ }
      , dependencyFlags{ dependencyFlags_ }
      , viewOffset{ viewOffset_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SubpassDependency2( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct — valid because of layout compatibility.
    SubpassDependency2( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDependency2( *reinterpret_cast<SubpassDependency2 const *>( &rhs ) ) {}

    SubpassDependency2 & operator=( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible reinterpretation.
    SubpassDependency2 & operator=( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SubpassDependency2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable member-wise setters.
    VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
    {
      srcSubpass = srcSubpass_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
    {
      dstSubpass = dstSubpass_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcStageMask( PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcStageMask = srcStageMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstStageMask( PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstStageMask = dstStageMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcAccessMask( AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAccessMask = srcAccessMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstAccessMask( AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAccessMask = dstAccessMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDependencyFlags( DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      dependencyFlags = dependencyFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setViewOffset( int32_t viewOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      viewOffset = viewOffset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (reference and pointer forms).
    operator VkSubpassDependency2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSubpassDependency2 *>( this );
    }

    operator VkSubpassDependency2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubpassDependency2 *>( this );
    }

    operator VkSubpassDependency2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSubpassDependency2 *>( this );
    }

    operator VkSubpassDependency2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSubpassDependency2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               uint32_t const &,
               uint32_t const &,
               PipelineStageFlags const &,
               PipelineStageFlags const &,
               AccessFlags const &,
               AccessFlags const &,
               DependencyFlags const &,
               int32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcSubpass, dstSubpass, srcStageMask, dstStageMask, srcAccessMask, dstAccessMask, dependencyFlags, viewOffset );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubpassDependency2 const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubpass == rhs.srcSubpass ) && ( dstSubpass == rhs.dstSubpass ) &&
             ( srcStageMask == rhs.srcStageMask ) && ( dstStageMask == rhs.dstStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) &&
             ( dstAccessMask == rhs.dstAccessMask ) && ( dependencyFlags == rhs.dependencyFlags ) && ( viewOffset == rhs.viewOffset );
#  endif
    }

    bool operator!=( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Mirrors VkSubpassDependency2 member-for-member; do not reorder.
    StructureType      sType           = StructureType::eSubpassDependency2;
    const void *       pNext           = {};
    uint32_t           srcSubpass      = {};
    uint32_t           dstSubpass      = {};
    PipelineStageFlags srcStageMask    = {};
    PipelineStageFlags dstStageMask    = {};
    AccessFlags        srcAccessMask   = {};
    AccessFlags        dstAccessMask   = {};
    DependencyFlags    dependencyFlags = {};
    int32_t            viewOffset      = {};
  };

// Trait mappings from the native C type (C++20 only) and the StructureType
// enumerant to the C++ wrapper type.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkSubpassDependency2>
  {
    using Type = SubpassDependency2;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eSubpassDependency2>
  {
    using Type = SubpassDependency2;
  };

  // Alias for the VK_KHR_create_renderpass2 extension spelling of this struct.
  using SubpassDependency2KHR = SubpassDependency2;

  // wrapper struct for struct VkRenderPassCreateInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassCreateInfo2.html
  // Layout-compatible C++ wrapper: member order/types mirror the C struct exactly so
  // the conversion operators below can reinterpret_cast between the two.
  struct RenderPassCreateInfo2
  {
    using NativeType = VkRenderPassCreateInfo2;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderPassCreateInfo2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; all counts/pointers default to zero/null.
    VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( RenderPassCreateFlags          flags_                   = {},
                                                uint32_t                       attachmentCount_         = {},
                                                const AttachmentDescription2 * pAttachments_            = {},
                                                uint32_t                       subpassCount_            = {},
                                                const SubpassDescription2 *    pSubpasses_              = {},
                                                uint32_t                       dependencyCount_         = {},
                                                const SubpassDependency2 *     pDependencies_           = {},
                                                uint32_t                       correlatedViewMaskCount_ = {},
                                                const uint32_t *               pCorrelatedViewMasks_    = {},
                                                const void *                   pNext_                   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , attachmentCount{ attachmentCount_ }
      , pAttachments{ pAttachments_ }
      , subpassCount{ subpassCount_ }
      , pSubpasses{ pSubpasses_ }
      , dependencyCount{ dependencyCount_ }
      , pDependencies{ pDependencies_ }
      , correlatedViewMaskCount{ correlatedViewMaskCount_ }
      , pCorrelatedViewMasks{ pCorrelatedViewMasks_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct — valid because of layout compatibility.
    RenderPassCreateInfo2( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassCreateInfo2( *reinterpret_cast<RenderPassCreateInfo2 const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives every count member from the size of its
    // corresponding array proxy, so counts and pointers stay consistent.
    RenderPassCreateInfo2( RenderPassCreateFlags                                         flags_,
                           ArrayProxyNoTemporaries<const AttachmentDescription2> const & attachments_,
                           ArrayProxyNoTemporaries<const SubpassDescription2> const &    subpasses_           = {},
                           ArrayProxyNoTemporaries<const SubpassDependency2> const &     dependencies_        = {},
                           ArrayProxyNoTemporaries<const uint32_t> const &               correlatedViewMasks_ = {},
                           const void *                                                  pNext_               = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , attachmentCount( static_cast<uint32_t>( attachments_.size() ) )
      , pAttachments( attachments_.data() )
      , subpassCount( static_cast<uint32_t>( subpasses_.size() ) )
      , pSubpasses( subpasses_.data() )
      , dependencyCount( static_cast<uint32_t>( dependencies_.size() ) )
      , pDependencies( dependencies_.data() )
      , correlatedViewMaskCount( static_cast<uint32_t>( correlatedViewMasks_.size() ) )
      , pCorrelatedViewMasks( correlatedViewMasks_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RenderPassCreateInfo2 & operator=( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible reinterpretation.
    RenderPassCreateInfo2 & operator=( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderPassCreateInfo2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable member-wise setters; the ArrayProxy variants (enhanced mode) set the
    // count and pointer of a pair together.
    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setFlags( RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = attachmentCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPAttachments( const AttachmentDescription2 * pAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pAttachments = pAttachments_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassCreateInfo2 & setAttachments( ArrayProxyNoTemporaries<const AttachmentDescription2> const & attachments_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = static_cast<uint32_t>( attachments_.size() );
      pAttachments    = attachments_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
    {
      subpassCount = subpassCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPSubpasses( const SubpassDescription2 * pSubpasses_ ) VULKAN_HPP_NOEXCEPT
    {
      pSubpasses = pSubpasses_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassCreateInfo2 & setSubpasses( ArrayProxyNoTemporaries<const SubpassDescription2> const & subpasses_ ) VULKAN_HPP_NOEXCEPT
    {
      subpassCount = static_cast<uint32_t>( subpasses_.size() );
      pSubpasses   = subpasses_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
    {
      dependencyCount = dependencyCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPDependencies( const SubpassDependency2 * pDependencies_ ) VULKAN_HPP_NOEXCEPT
    {
      pDependencies = pDependencies_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassCreateInfo2 & setDependencies( ArrayProxyNoTemporaries<const SubpassDependency2> const & dependencies_ ) VULKAN_HPP_NOEXCEPT
    {
      dependencyCount = static_cast<uint32_t>( dependencies_.size() );
      pDependencies   = dependencies_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT
    {
      correlatedViewMaskCount = correlatedViewMaskCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPCorrelatedViewMasks( const uint32_t * pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
    {
      pCorrelatedViewMasks = pCorrelatedViewMasks_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassCreateInfo2 & setCorrelatedViewMasks( ArrayProxyNoTemporaries<const uint32_t> const & correlatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
    {
      correlatedViewMaskCount = static_cast<uint32_t>( correlatedViewMasks_.size() );
      pCorrelatedViewMasks    = correlatedViewMasks_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (reference and pointer forms).
    operator VkRenderPassCreateInfo2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassCreateInfo2 *>( this );
    }

    operator VkRenderPassCreateInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassCreateInfo2 *>( this );
    }

    operator VkRenderPassCreateInfo2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderPassCreateInfo2 *>( this );
    }

    operator VkRenderPassCreateInfo2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderPassCreateInfo2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               RenderPassCreateFlags const &,
               uint32_t const &,
               const AttachmentDescription2 * const &,
               uint32_t const &,
               const SubpassDescription2 * const &,
               uint32_t const &,
               const SubpassDependency2 * const &,
               uint32_t const &,
               const uint32_t * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       flags,
                       attachmentCount,
                       pAttachments,
                       subpassCount,
                       pSubpasses,
                       dependencyCount,
                       pDependencies,
                       correlatedViewMaskCount,
                       pCorrelatedViewMasks );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderPassCreateInfo2 const & ) const = default;
#else
    // Member-wise equality; pointer members compare by address, not pointee contents.
    bool operator==( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( attachmentCount == rhs.attachmentCount ) &&
             ( pAttachments == rhs.pAttachments ) && ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) &&
             ( dependencyCount == rhs.dependencyCount ) && ( pDependencies == rhs.pDependencies ) &&
             ( correlatedViewMaskCount == rhs.correlatedViewMaskCount ) && ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks );
#  endif
    }

    bool operator!=( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Mirrors VkRenderPassCreateInfo2 member-for-member; do not reorder.
    StructureType                  sType                   = StructureType::eRenderPassCreateInfo2;
    const void *                   pNext                   = {};
    RenderPassCreateFlags          flags                   = {};
    uint32_t                       attachmentCount         = {};
    const AttachmentDescription2 * pAttachments            = {};
    uint32_t                       subpassCount            = {};
    const SubpassDescription2 *    pSubpasses              = {};
    uint32_t                       dependencyCount         = {};
    const SubpassDependency2 *     pDependencies           = {};
    uint32_t                       correlatedViewMaskCount = {};
    const uint32_t *               pCorrelatedViewMasks    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkRenderPassCreateInfo2 to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkRenderPassCreateInfo2>
  {
    using Type = RenderPassCreateInfo2;
  };
#endif

  // Maps the StructureType enumerant eRenderPassCreateInfo2 to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eRenderPassCreateInfo2>
  {
    using Type = RenderPassCreateInfo2;
  };

  // Backward-compatible alias: the KHR-suffixed name refers to the same type.
  using RenderPassCreateInfo2KHR = RenderPassCreateInfo2;

  // wrapper struct for struct VkRenderPassCreationControlEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassCreationControlEXT.html
  struct RenderPassCreationControlEXT
  {
    using NativeType = VkRenderPassCreationControlEXT;

    // Chain metadata: this struct may appear at most once in a pNext chain, and its
    // sType member is fixed to the value below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderPassCreationControlEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is set by its member initializer, so only pNext and the
    // payload field are parameters.
    VULKAN_HPP_CONSTEXPR RenderPassCreationControlEXT( Bool32 disallowMerging_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , disallowMerging{ disallowMerging_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RenderPassCreationControlEXT( RenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because this wrapper is layout-compatible
    // with VkRenderPassCreationControlEXT (members declared in identical order below).
    RenderPassCreationControlEXT( VkRenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassCreationControlEXT( *reinterpret_cast<RenderPassCreationControlEXT const *>( &rhs ) )
    {
    }

    RenderPassCreationControlEXT & operator=( RenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility argument as the
    // converting constructor above).
    RenderPassCreationControlEXT & operator=( VkRenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderPassCreationControlEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 RenderPassCreationControlEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassCreationControlEXT & setDisallowMerging( Bool32 disallowMerging_ ) VULKAN_HPP_NOEXCEPT
    {
      disallowMerging = disallowMerging_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer, so the
    // wrapper can be handed directly to the C API.
    operator VkRenderPassCreationControlEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassCreationControlEXT *>( this );
    }

    operator VkRenderPassCreationControlEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassCreationControlEXT *>( this );
    }

    operator VkRenderPassCreationControlEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderPassCreationControlEXT *>( this );
    }

    operator VkRenderPassCreationControlEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderPassCreationControlEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order; backs the
    // reflection-based comparison in operator== below.
    std::tuple<StructureType const &, const void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, disallowMerging );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderPassCreationControlEXT const & ) const = default;
#else
    // Memberwise equality; note that pNext compares by pointer value, not by the
    // contents of any chained structure.
    bool operator==( RenderPassCreationControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( disallowMerging == rhs.disallowMerging );
#  endif
    }

    bool operator!=( RenderPassCreationControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkRenderPassCreationControlEXT exactly (order and types);
    // do not reorder — the reinterpret_casts above depend on it.
    StructureType sType           = StructureType::eRenderPassCreationControlEXT;
    const void *  pNext           = {};
    Bool32        disallowMerging = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkRenderPassCreationControlEXT to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkRenderPassCreationControlEXT>
  {
    using Type = RenderPassCreationControlEXT;
  };
#endif

  // Maps the StructureType enumerant eRenderPassCreationControlEXT to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eRenderPassCreationControlEXT>
  {
    using Type = RenderPassCreationControlEXT;
  };

  // wrapper struct for struct VkRenderPassCreationFeedbackInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassCreationFeedbackInfoEXT.html
  struct RenderPassCreationFeedbackInfoEXT
  {
    using NativeType = VkRenderPassCreationFeedbackInfoEXT;

    // NOTE: unlike most Vulkan structs, this one carries no sType/pNext members — it is
    // plain data, referenced via RenderPassCreationFeedbackCreateInfoEXT::pRenderPassFeedback.

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor for the single data member.
    VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackInfoEXT( uint32_t postMergeSubpassCount_ = {} ) VULKAN_HPP_NOEXCEPT
      : postMergeSubpassCount{ postMergeSubpassCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackInfoEXT( RenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because this wrapper is layout-compatible
    // with VkRenderPassCreationFeedbackInfoEXT.
    RenderPassCreationFeedbackInfoEXT( VkRenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassCreationFeedbackInfoEXT( *reinterpret_cast<RenderPassCreationFeedbackInfoEXT const *>( &rhs ) )
    {
    }

    RenderPassCreationFeedbackInfoEXT & operator=( RenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility argument as above).
    RenderPassCreationFeedbackInfoEXT & operator=( VkRenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderPassCreationFeedbackInfoEXT const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkRenderPassCreationFeedbackInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassCreationFeedbackInfoEXT *>( this );
    }

    operator VkRenderPassCreationFeedbackInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassCreationFeedbackInfoEXT *>( this );
    }

    operator VkRenderPassCreationFeedbackInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderPassCreationFeedbackInfoEXT *>( this );
    }

    operator VkRenderPassCreationFeedbackInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderPassCreationFeedbackInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members; backs the reflection-based comparison below.
    std::tuple<uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( postMergeSubpassCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderPassCreationFeedbackInfoEXT const & ) const = default;
#else
    bool operator==( RenderPassCreationFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( postMergeSubpassCount == rhs.postMergeSubpassCount );
#  endif
    }

    bool operator!=( RenderPassCreationFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Mirrors VkRenderPassCreationFeedbackInfoEXT exactly; do not reorder.
    uint32_t postMergeSubpassCount = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkRenderPassCreationFeedbackInfoEXT to its C++ wrapper type (C++20 and later only).
  // No StructureType specialization here — this struct has no sType member.
  template <>
  struct CppType<VkRenderPassCreationFeedbackInfoEXT>
  {
    using Type = RenderPassCreationFeedbackInfoEXT;
  };
#endif

  // wrapper struct for struct VkRenderPassCreationFeedbackCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassCreationFeedbackCreateInfoEXT.html
  struct RenderPassCreationFeedbackCreateInfoEXT
  {
    using NativeType = VkRenderPassCreationFeedbackCreateInfoEXT;

    // Chain metadata: may appear at most once in a pNext chain; sType is fixed below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderPassCreationFeedbackCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor. pRenderPassFeedback is a non-const pointer — the pointee is
    // presumably written back by the implementation; confirm against the spec.
    VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackCreateInfoEXT( RenderPassCreationFeedbackInfoEXT * pRenderPassFeedback_ = {},
                                                                  const void *                        pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pRenderPassFeedback{ pRenderPassFeedback_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackCreateInfoEXT( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because this wrapper is layout-compatible
    // with VkRenderPassCreationFeedbackCreateInfoEXT.
    RenderPassCreationFeedbackCreateInfoEXT( VkRenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassCreationFeedbackCreateInfoEXT( *reinterpret_cast<RenderPassCreationFeedbackCreateInfoEXT const *>( &rhs ) )
    {
    }

    RenderPassCreationFeedbackCreateInfoEXT & operator=( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility argument as above).
    RenderPassCreationFeedbackCreateInfoEXT & operator=( VkRenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderPassCreationFeedbackCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 RenderPassCreationFeedbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassCreationFeedbackCreateInfoEXT &
      setPRenderPassFeedback( RenderPassCreationFeedbackInfoEXT * pRenderPassFeedback_ ) VULKAN_HPP_NOEXCEPT
    {
      pRenderPassFeedback = pRenderPassFeedback_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkRenderPassCreationFeedbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassCreationFeedbackCreateInfoEXT *>( this );
    }

    operator VkRenderPassCreationFeedbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassCreationFeedbackCreateInfoEXT *>( this );
    }

    operator VkRenderPassCreationFeedbackCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderPassCreationFeedbackCreateInfoEXT *>( this );
    }

    operator VkRenderPassCreationFeedbackCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderPassCreationFeedbackCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, RenderPassCreationFeedbackInfoEXT * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pRenderPassFeedback );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderPassCreationFeedbackCreateInfoEXT const & ) const = default;
#else
    // Memberwise equality; pointer members compare by address, not by pointee contents.
    bool operator==( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pRenderPassFeedback == rhs.pRenderPassFeedback );
#  endif
    }

    bool operator!=( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkRenderPassCreationFeedbackCreateInfoEXT exactly; do not reorder.
    StructureType                       sType               = StructureType::eRenderPassCreationFeedbackCreateInfoEXT;
    const void *                        pNext               = {};
    RenderPassCreationFeedbackInfoEXT * pRenderPassFeedback = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkRenderPassCreationFeedbackCreateInfoEXT to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkRenderPassCreationFeedbackCreateInfoEXT>
  {
    using Type = RenderPassCreationFeedbackCreateInfoEXT;
  };
#endif

  // Maps the StructureType enumerant eRenderPassCreationFeedbackCreateInfoEXT to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eRenderPassCreationFeedbackCreateInfoEXT>
  {
    using Type = RenderPassCreationFeedbackCreateInfoEXT;
  };

  // wrapper struct for struct VkRenderPassFragmentDensityMapCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassFragmentDensityMapCreateInfoEXT.html
  struct RenderPassFragmentDensityMapCreateInfoEXT
  {
    using NativeType = VkRenderPassFragmentDensityMapCreateInfoEXT;

    // Chain metadata: may appear at most once in a pNext chain; sType is fixed below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; the attachment reference is embedded by value, not by pointer.
    VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( AttachmentReference fragmentDensityMapAttachment_ = {},
                                                                    const void *        pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , fragmentDensityMapAttachment{ fragmentDensityMapAttachment_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because this wrapper is layout-compatible
    // with VkRenderPassFragmentDensityMapCreateInfoEXT.
    RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassFragmentDensityMapCreateInfoEXT( *reinterpret_cast<RenderPassFragmentDensityMapCreateInfoEXT const *>( &rhs ) )
    {
    }

    RenderPassFragmentDensityMapCreateInfoEXT & operator=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility argument as above).
    RenderPassFragmentDensityMapCreateInfoEXT & operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderPassFragmentDensityMapCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT &
      setFragmentDensityMapAttachment( AttachmentReference const & fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentDensityMapAttachment = fragmentDensityMapAttachment_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkRenderPassFragmentDensityMapCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassFragmentDensityMapCreateInfoEXT *>( this );
    }

    operator VkRenderPassFragmentDensityMapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassFragmentDensityMapCreateInfoEXT *>( this );
    }

    operator VkRenderPassFragmentDensityMapCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderPassFragmentDensityMapCreateInfoEXT *>( this );
    }

    operator VkRenderPassFragmentDensityMapCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderPassFragmentDensityMapCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, AttachmentReference const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, fragmentDensityMapAttachment );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderPassFragmentDensityMapCreateInfoEXT const & ) const = default;
#else
    // Memberwise equality (the embedded AttachmentReference compares by value).
    bool operator==( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMapAttachment == rhs.fragmentDensityMapAttachment );
#  endif
    }

    bool operator!=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkRenderPassFragmentDensityMapCreateInfoEXT exactly; do not reorder.
    StructureType       sType                        = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
    const void *        pNext                        = {};
    AttachmentReference fragmentDensityMapAttachment = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkRenderPassFragmentDensityMapCreateInfoEXT to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkRenderPassFragmentDensityMapCreateInfoEXT>
  {
    using Type = RenderPassFragmentDensityMapCreateInfoEXT;
  };
#endif

  // Maps the StructureType enumerant eRenderPassFragmentDensityMapCreateInfoEXT to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eRenderPassFragmentDensityMapCreateInfoEXT>
  {
    using Type = RenderPassFragmentDensityMapCreateInfoEXT;
  };

  // wrapper struct for struct VkRenderPassFragmentDensityMapOffsetEndInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassFragmentDensityMapOffsetEndInfoEXT.html
  struct RenderPassFragmentDensityMapOffsetEndInfoEXT
  {
    using NativeType = VkRenderPassFragmentDensityMapOffsetEndInfoEXT;

    // Chain metadata: may appear at most once in a pNext chain; sType is fixed below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderPassFragmentDensityMapOffsetEndInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor taking the raw count + pointer pair; the caller is responsible
    // for keeping them consistent (see the ArrayProxy overload below for a safer form).
    VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapOffsetEndInfoEXT( uint32_t         fragmentDensityOffsetCount_ = {},
                                                                       const Offset2D * pFragmentDensityOffsets_    = {},
                                                                       const void *     pNext_                      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , fragmentDensityOffsetCount{ fragmentDensityOffsetCount_ }
      , pFragmentDensityOffsets{ pFragmentDensityOffsets_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapOffsetEndInfoEXT( RenderPassFragmentDensityMapOffsetEndInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because this wrapper is layout-compatible
    // with VkRenderPassFragmentDensityMapOffsetEndInfoEXT.
    RenderPassFragmentDensityMapOffsetEndInfoEXT( VkRenderPassFragmentDensityMapOffsetEndInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassFragmentDensityMapOffsetEndInfoEXT( *reinterpret_cast<RenderPassFragmentDensityMapOffsetEndInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives count and pointer from one array proxy, so
    // they cannot go out of sync. The proxy must not refer to a temporary — only the
    // pointer is stored, not a copy of the data.
    RenderPassFragmentDensityMapOffsetEndInfoEXT( ArrayProxyNoTemporaries<const Offset2D> const & fragmentDensityOffsets_, const void * pNext_ = nullptr )
      : pNext( pNext_ )
      , fragmentDensityOffsetCount( static_cast<uint32_t>( fragmentDensityOffsets_.size() ) )
      , pFragmentDensityOffsets( fragmentDensityOffsets_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RenderPassFragmentDensityMapOffsetEndInfoEXT & operator=( RenderPassFragmentDensityMapOffsetEndInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility argument as above).
    RenderPassFragmentDensityMapOffsetEndInfoEXT & operator=( VkRenderPassFragmentDensityMapOffsetEndInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderPassFragmentDensityMapOffsetEndInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapOffsetEndInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapOffsetEndInfoEXT &
      setFragmentDensityOffsetCount( uint32_t fragmentDensityOffsetCount_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentDensityOffsetCount = fragmentDensityOffsetCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapOffsetEndInfoEXT &
      setPFragmentDensityOffsets( const Offset2D * pFragmentDensityOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      pFragmentDensityOffsets = pFragmentDensityOffsets_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: updates count and pointer together from one array proxy.
    RenderPassFragmentDensityMapOffsetEndInfoEXT &
      setFragmentDensityOffsets( ArrayProxyNoTemporaries<const Offset2D> const & fragmentDensityOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentDensityOffsetCount = static_cast<uint32_t>( fragmentDensityOffsets_.size() );
      pFragmentDensityOffsets    = fragmentDensityOffsets_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkRenderPassFragmentDensityMapOffsetEndInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassFragmentDensityMapOffsetEndInfoEXT *>( this );
    }

    operator VkRenderPassFragmentDensityMapOffsetEndInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassFragmentDensityMapOffsetEndInfoEXT *>( this );
    }

    operator VkRenderPassFragmentDensityMapOffsetEndInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderPassFragmentDensityMapOffsetEndInfoEXT *>( this );
    }

    operator VkRenderPassFragmentDensityMapOffsetEndInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderPassFragmentDensityMapOffsetEndInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const Offset2D * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, fragmentDensityOffsetCount, pFragmentDensityOffsets );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderPassFragmentDensityMapOffsetEndInfoEXT const & ) const = default;
#else
    // Memberwise equality; the offsets array compares by pointer address only,
    // not element-by-element.
    bool operator==( RenderPassFragmentDensityMapOffsetEndInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityOffsetCount == rhs.fragmentDensityOffsetCount ) &&
             ( pFragmentDensityOffsets == rhs.pFragmentDensityOffsets );
#  endif
    }

    bool operator!=( RenderPassFragmentDensityMapOffsetEndInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkRenderPassFragmentDensityMapOffsetEndInfoEXT exactly; do not reorder.
    StructureType    sType                      = StructureType::eRenderPassFragmentDensityMapOffsetEndInfoEXT;
    const void *     pNext                      = {};
    uint32_t         fragmentDensityOffsetCount = {};
    const Offset2D * pFragmentDensityOffsets    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkRenderPassFragmentDensityMapOffsetEndInfoEXT to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkRenderPassFragmentDensityMapOffsetEndInfoEXT>
  {
    using Type = RenderPassFragmentDensityMapOffsetEndInfoEXT;
  };
#endif

  // Maps the StructureType enumerant eRenderPassFragmentDensityMapOffsetEndInfoEXT to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eRenderPassFragmentDensityMapOffsetEndInfoEXT>
  {
    using Type = RenderPassFragmentDensityMapOffsetEndInfoEXT;
  };

  // Backward-compatible alias: the QCOM-suffixed name refers to the same type.
  using SubpassFragmentDensityMapOffsetEndInfoQCOM = RenderPassFragmentDensityMapOffsetEndInfoEXT;

  // wrapper struct for struct VkRenderPassInputAttachmentAspectCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassInputAttachmentAspectCreateInfo.html
  struct RenderPassInputAttachmentAspectCreateInfo
  {
    using NativeType = VkRenderPassInputAttachmentAspectCreateInfo;

    // Chain metadata: may appear at most once in a pNext chain; sType is fixed below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderPassInputAttachmentAspectCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor taking the raw count + pointer pair; the caller is responsible
    // for keeping them consistent (see the ArrayProxy overload below for a safer form).
    VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( uint32_t                               aspectReferenceCount_ = {},
                                                                    const InputAttachmentAspectReference * pAspectReferences_    = {},
                                                                    const void *                           pNext_                = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , aspectReferenceCount{ aspectReferenceCount_ }
      , pAspectReferences{ pAspectReferences_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because this wrapper is layout-compatible
    // with VkRenderPassInputAttachmentAspectCreateInfo.
    RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassInputAttachmentAspectCreateInfo( *reinterpret_cast<RenderPassInputAttachmentAspectCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives count and pointer from one array proxy, so
    // they cannot go out of sync. The proxy must not refer to a temporary — only the
    // pointer is stored, not a copy of the data.
    RenderPassInputAttachmentAspectCreateInfo( ArrayProxyNoTemporaries<const InputAttachmentAspectReference> const & aspectReferences_,
                                               const void *                                                          pNext_ = nullptr )
      : pNext( pNext_ ), aspectReferenceCount( static_cast<uint32_t>( aspectReferences_.size() ) ), pAspectReferences( aspectReferences_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RenderPassInputAttachmentAspectCreateInfo & operator=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility argument as above).
    RenderPassInputAttachmentAspectCreateInfo & operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderPassInputAttachmentAspectCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & setAspectReferenceCount( uint32_t aspectReferenceCount_ ) VULKAN_HPP_NOEXCEPT
    {
      aspectReferenceCount = aspectReferenceCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo &
      setPAspectReferences( const InputAttachmentAspectReference * pAspectReferences_ ) VULKAN_HPP_NOEXCEPT
    {
      pAspectReferences = pAspectReferences_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: updates count and pointer together from one array proxy.
    RenderPassInputAttachmentAspectCreateInfo &
      setAspectReferences( ArrayProxyNoTemporaries<const InputAttachmentAspectReference> const & aspectReferences_ ) VULKAN_HPP_NOEXCEPT
    {
      aspectReferenceCount = static_cast<uint32_t>( aspectReferences_.size() );
      pAspectReferences    = aspectReferences_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkRenderPassInputAttachmentAspectCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo *>( this );
    }

    operator VkRenderPassInputAttachmentAspectCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassInputAttachmentAspectCreateInfo *>( this );
    }

    operator VkRenderPassInputAttachmentAspectCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo *>( this );
    }

    operator VkRenderPassInputAttachmentAspectCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderPassInputAttachmentAspectCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const InputAttachmentAspectReference * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, aspectReferenceCount, pAspectReferences );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderPassInputAttachmentAspectCreateInfo const & ) const = default;
#else
    // Memberwise equality; the references array compares by pointer address only,
    // not element-by-element.
    bool operator==( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( aspectReferenceCount == rhs.aspectReferenceCount ) &&
             ( pAspectReferences == rhs.pAspectReferences );
#  endif
    }

    bool operator!=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkRenderPassInputAttachmentAspectCreateInfo exactly; do not reorder.
    StructureType                          sType                = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
    const void *                           pNext                = {};
    uint32_t                               aspectReferenceCount = {};
    const InputAttachmentAspectReference * pAspectReferences    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkRenderPassInputAttachmentAspectCreateInfo to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkRenderPassInputAttachmentAspectCreateInfo>
  {
    using Type = RenderPassInputAttachmentAspectCreateInfo;
  };
#endif

  // Maps the StructureType enumerant eRenderPassInputAttachmentAspectCreateInfo to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eRenderPassInputAttachmentAspectCreateInfo>
  {
    using Type = RenderPassInputAttachmentAspectCreateInfo;
  };

  // Backward-compatible alias: the KHR-suffixed name refers to the same type.
  using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo;

  // wrapper struct for struct VkRenderPassMultiviewCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassMultiviewCreateInfo.html
  // The wrapper is treated as layout-compatible with the native struct (see the reinterpret_casts below),
  // so the member order and types below must exactly mirror VkRenderPassMultiviewCreateInfo.
  struct RenderPassMultiviewCreateInfo
  {
    using NativeType = VkRenderPassMultiviewCreateInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderPassMultiviewCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // element-wise constructor; note that pNext_ is the last parameter although pNext is the second member
    VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( uint32_t         subpassCount_         = {},
                                                        const uint32_t * pViewMasks_           = {},
                                                        uint32_t         dependencyCount_      = {},
                                                        const int32_t *  pViewOffsets_         = {},
                                                        uint32_t         correlationMaskCount_ = {},
                                                        const uint32_t * pCorrelationMasks_    = {},
                                                        const void *     pNext_                = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , subpassCount{ subpassCount_ }
      , pViewMasks{ pViewMasks_ }
      , dependencyCount{ dependencyCount_ }
      , pViewOffsets{ pViewOffsets_ }
      , correlationMaskCount{ correlationMaskCount_ }
      , pCorrelationMasks{ pCorrelationMasks_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construction from the native struct relies on layout compatibility with this wrapper
    RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassMultiviewCreateInfo( *reinterpret_cast<RenderPassMultiviewCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // convenience constructor: each count member is derived from the size of the corresponding array proxy
    RenderPassMultiviewCreateInfo( ArrayProxyNoTemporaries<const uint32_t> const & viewMasks_,
                                   ArrayProxyNoTemporaries<const int32_t> const &  viewOffsets_      = {},
                                   ArrayProxyNoTemporaries<const uint32_t> const & correlationMasks_ = {},
                                   const void *                                    pNext_            = nullptr )
      : pNext( pNext_ )
      , subpassCount( static_cast<uint32_t>( viewMasks_.size() ) )
      , pViewMasks( viewMasks_.data() )
      , dependencyCount( static_cast<uint32_t>( viewOffsets_.size() ) )
      , pViewOffsets( viewOffsets_.data() )
      , correlationMaskCount( static_cast<uint32_t>( correlationMasks_.size() ) )
      , pCorrelationMasks( correlationMasks_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RenderPassMultiviewCreateInfo & operator=( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assignment from the native struct, again via the layout-compatibility assumption
    RenderPassMultiviewCreateInfo & operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderPassMultiviewCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // all setters return *this to allow call-chaining
    VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
    {
      subpassCount = subpassCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPViewMasks( const uint32_t * pViewMasks_ ) VULKAN_HPP_NOEXCEPT
    {
      pViewMasks = pViewMasks_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // sets both subpassCount and pViewMasks from a single array proxy
    RenderPassMultiviewCreateInfo & setViewMasks( ArrayProxyNoTemporaries<const uint32_t> const & viewMasks_ ) VULKAN_HPP_NOEXCEPT
    {
      subpassCount = static_cast<uint32_t>( viewMasks_.size() );
      pViewMasks   = viewMasks_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
    {
      dependencyCount = dependencyCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPViewOffsets( const int32_t * pViewOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      pViewOffsets = pViewOffsets_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // sets both dependencyCount and pViewOffsets from a single array proxy
    RenderPassMultiviewCreateInfo & setViewOffsets( ArrayProxyNoTemporaries<const int32_t> const & viewOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      dependencyCount = static_cast<uint32_t>( viewOffsets_.size() );
      pViewOffsets    = viewOffsets_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setCorrelationMaskCount( uint32_t correlationMaskCount_ ) VULKAN_HPP_NOEXCEPT
    {
      correlationMaskCount = correlationMaskCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPCorrelationMasks( const uint32_t * pCorrelationMasks_ ) VULKAN_HPP_NOEXCEPT
    {
      pCorrelationMasks = pCorrelationMasks_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // sets both correlationMaskCount and pCorrelationMasks from a single array proxy
    RenderPassMultiviewCreateInfo & setCorrelationMasks( ArrayProxyNoTemporaries<const uint32_t> const & correlationMasks_ ) VULKAN_HPP_NOEXCEPT
    {
      correlationMaskCount = static_cast<uint32_t>( correlationMasks_.size() );
      pCorrelationMasks    = correlationMasks_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions that reinterpret this wrapper as the native struct
    operator VkRenderPassMultiviewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassMultiviewCreateInfo *>( this );
    }

    operator VkRenderPassMultiviewCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassMultiviewCreateInfo *>( this );
    }

    operator VkRenderPassMultiviewCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderPassMultiviewCreateInfo *>( this );
    }

    operator VkRenderPassMultiviewCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderPassMultiviewCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // returns a tuple of references to all members, used below for comparison
    std::tuple<StructureType const &,
               const void * const &,
               uint32_t const &,
               const uint32_t * const &,
               uint32_t const &,
               const int32_t * const &,
               uint32_t const &,
               const uint32_t * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, subpassCount, pViewMasks, dependencyCount, pViewOffsets, correlationMaskCount, pCorrelationMasks );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderPassMultiviewCreateInfo const & ) const = default;
#else
    // NOTE: pointer members are compared by address, not by the data they point to
    bool operator==( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassCount == rhs.subpassCount ) && ( pViewMasks == rhs.pViewMasks ) &&
             ( dependencyCount == rhs.dependencyCount ) && ( pViewOffsets == rhs.pViewOffsets ) && ( correlationMaskCount == rhs.correlationMaskCount ) &&
             ( pCorrelationMasks == rhs.pCorrelationMasks );
#  endif
    }

    bool operator!=( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType    sType                = StructureType::eRenderPassMultiviewCreateInfo;
    const void *     pNext                = {};
    uint32_t         subpassCount         = {};
    const uint32_t * pViewMasks           = {};
    uint32_t         dependencyCount      = {};
    const int32_t *  pViewOffsets         = {};
    uint32_t         correlationMaskCount = {};
    const uint32_t * pCorrelationMasks    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C struct type to its C++ wrapper type (only provided for C++20 and newer)
  template <>
  struct CppType<VkRenderPassMultiviewCreateInfo>
  {
    using Type = RenderPassMultiviewCreateInfo;
  };
#endif

  // maps the StructureType enumerator to its C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::eRenderPassMultiviewCreateInfo>
  {
    using Type = RenderPassMultiviewCreateInfo;
  };

  // the KHR-suffixed name is an alias for the unsuffixed (core) type
  using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo;

  // wrapper struct for struct VkRenderPassPerformanceCountersByRegionBeginInfoARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassPerformanceCountersByRegionBeginInfoARM.html
  // The wrapper is treated as layout-compatible with the native struct (see the reinterpret_casts below),
  // so the member order and types below must exactly mirror the C struct.
  struct RenderPassPerformanceCountersByRegionBeginInfoARM
  {
    using NativeType = VkRenderPassPerformanceCountersByRegionBeginInfoARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderPassPerformanceCountersByRegionBeginInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // element-wise constructor; note that pNext_ is the last parameter although pNext is the second member
    VULKAN_HPP_CONSTEXPR RenderPassPerformanceCountersByRegionBeginInfoARM( uint32_t              counterAddressCount_ = {},
                                                                            const DeviceAddress * pCounterAddresses_   = {},
                                                                            Bool32                serializeRegions_    = {},
                                                                            uint32_t              counterIndexCount_   = {},
                                                                            uint32_t *            pCounterIndices_     = {},
                                                                            void *                pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , counterAddressCount{ counterAddressCount_ }
      , pCounterAddresses{ pCounterAddresses_ }
      , serializeRegions{ serializeRegions_ }
      , counterIndexCount{ counterIndexCount_ }
      , pCounterIndices{ pCounterIndices_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      RenderPassPerformanceCountersByRegionBeginInfoARM( RenderPassPerformanceCountersByRegionBeginInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construction from the native struct relies on layout compatibility with this wrapper
    RenderPassPerformanceCountersByRegionBeginInfoARM( VkRenderPassPerformanceCountersByRegionBeginInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassPerformanceCountersByRegionBeginInfoARM( *reinterpret_cast<RenderPassPerformanceCountersByRegionBeginInfoARM const *>( &rhs ) )
    {
    }

    RenderPassPerformanceCountersByRegionBeginInfoARM &
      operator=( RenderPassPerformanceCountersByRegionBeginInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assignment from the native struct, again via the layout-compatibility assumption
    RenderPassPerformanceCountersByRegionBeginInfoARM & operator=( VkRenderPassPerformanceCountersByRegionBeginInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderPassPerformanceCountersByRegionBeginInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // all setters return *this to allow call-chaining
    VULKAN_HPP_CONSTEXPR_14 RenderPassPerformanceCountersByRegionBeginInfoARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassPerformanceCountersByRegionBeginInfoARM & setCounterAddressCount( uint32_t counterAddressCount_ ) VULKAN_HPP_NOEXCEPT
    {
      counterAddressCount = counterAddressCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassPerformanceCountersByRegionBeginInfoARM &
      setPCounterAddresses( const DeviceAddress * pCounterAddresses_ ) VULKAN_HPP_NOEXCEPT
    {
      pCounterAddresses = pCounterAddresses_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassPerformanceCountersByRegionBeginInfoARM & setSerializeRegions( Bool32 serializeRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      serializeRegions = serializeRegions_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassPerformanceCountersByRegionBeginInfoARM & setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      counterIndexCount = counterIndexCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassPerformanceCountersByRegionBeginInfoARM & setPCounterIndices( uint32_t * pCounterIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pCounterIndices = pCounterIndices_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions that reinterpret this wrapper as the native struct
    operator VkRenderPassPerformanceCountersByRegionBeginInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassPerformanceCountersByRegionBeginInfoARM *>( this );
    }

    operator VkRenderPassPerformanceCountersByRegionBeginInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassPerformanceCountersByRegionBeginInfoARM *>( this );
    }

    operator VkRenderPassPerformanceCountersByRegionBeginInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderPassPerformanceCountersByRegionBeginInfoARM *>( this );
    }

    operator VkRenderPassPerformanceCountersByRegionBeginInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderPassPerformanceCountersByRegionBeginInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // returns a tuple of references to all members, used below for comparison
    std::tuple<StructureType const &, void * const &, uint32_t const &, const DeviceAddress * const &, Bool32 const &, uint32_t const &, uint32_t * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, counterAddressCount, pCounterAddresses, serializeRegions, counterIndexCount, pCounterIndices );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderPassPerformanceCountersByRegionBeginInfoARM const & ) const = default;
#else
    // NOTE: pointer members are compared by address, not by the data they point to
    bool operator==( RenderPassPerformanceCountersByRegionBeginInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( counterAddressCount == rhs.counterAddressCount ) &&
             ( pCounterAddresses == rhs.pCounterAddresses ) && ( serializeRegions == rhs.serializeRegions ) && ( counterIndexCount == rhs.counterIndexCount ) &&
             ( pCounterIndices == rhs.pCounterIndices );
#  endif
    }

    bool operator!=( RenderPassPerformanceCountersByRegionBeginInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType         sType               = StructureType::eRenderPassPerformanceCountersByRegionBeginInfoARM;
    void *                pNext               = {};
    uint32_t              counterAddressCount = {};
    const DeviceAddress * pCounterAddresses   = {};
    Bool32                serializeRegions    = {};
    uint32_t              counterIndexCount   = {};
    uint32_t *            pCounterIndices     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C struct type to its C++ wrapper type (only provided for C++20 and newer)
  template <>
  struct CppType<VkRenderPassPerformanceCountersByRegionBeginInfoARM>
  {
    using Type = RenderPassPerformanceCountersByRegionBeginInfoARM;
  };
#endif

  // maps the StructureType enumerator to its C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::eRenderPassPerformanceCountersByRegionBeginInfoARM>
  {
    using Type = RenderPassPerformanceCountersByRegionBeginInfoARM;
  };

  // wrapper struct for struct VkSubpassSampleLocationsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassSampleLocationsEXT.html
  // Plain data struct (no sType/pNext); treated as layout-compatible with the native struct (see the reinterpret_casts below).
  struct SubpassSampleLocationsEXT
  {
    using NativeType = VkSubpassSampleLocationsEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // element-wise constructor
    VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( uint32_t subpassIndex_ = {}, SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT
      : subpassIndex{ subpassIndex_ }
      , sampleLocationsInfo{ sampleLocationsInfo_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construction from the native struct relies on layout compatibility with this wrapper
    SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SubpassSampleLocationsEXT( *reinterpret_cast<SubpassSampleLocationsEXT const *>( &rhs ) )
    {
    }

    SubpassSampleLocationsEXT & operator=( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assignment from the native struct, again via the layout-compatibility assumption
    SubpassSampleLocationsEXT & operator=( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SubpassSampleLocationsEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // all setters return *this to allow call-chaining
    VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT & setSubpassIndex( uint32_t subpassIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      subpassIndex = subpassIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT & setSampleLocationsInfo( SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationsInfo = sampleLocationsInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions that reinterpret this wrapper as the native struct
    operator VkSubpassSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSubpassSampleLocationsEXT *>( this );
    }

    operator VkSubpassSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubpassSampleLocationsEXT *>( this );
    }

    operator VkSubpassSampleLocationsEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSubpassSampleLocationsEXT *>( this );
    }

    operator VkSubpassSampleLocationsEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSubpassSampleLocationsEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // returns a tuple of references to all members, used below for comparison
    std::tuple<uint32_t const &, SampleLocationsInfoEXT const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( subpassIndex, sampleLocationsInfo );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubpassSampleLocationsEXT const & ) const = default;
#else
    bool operator==( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( subpassIndex == rhs.subpassIndex ) && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
#  endif
    }

    bool operator!=( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t               subpassIndex        = {};
    SampleLocationsInfoEXT sampleLocationsInfo = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C struct type to its C++ wrapper type (only provided for C++20 and newer)
  template <>
  struct CppType<VkSubpassSampleLocationsEXT>
  {
    using Type = SubpassSampleLocationsEXT;
  };
#endif

  // wrapper struct for struct VkRenderPassSampleLocationsBeginInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassSampleLocationsBeginInfoEXT.html
  // The wrapper is treated as layout-compatible with the native struct (see the reinterpret_casts below),
  // so the member order and types below must exactly mirror the C struct.
  struct RenderPassSampleLocationsBeginInfoEXT
  {
    using NativeType = VkRenderPassSampleLocationsBeginInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderPassSampleLocationsBeginInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // element-wise constructor; note that pNext_ is the last parameter although pNext is the second member
    VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( uint32_t                             attachmentInitialSampleLocationsCount_ = {},
                                                                const AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_     = {},
                                                                uint32_t                             postSubpassSampleLocationsCount_       = {},
                                                                const SubpassSampleLocationsEXT *    pPostSubpassSampleLocations_           = {},
                                                                const void *                         pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , attachmentInitialSampleLocationsCount{ attachmentInitialSampleLocationsCount_ }
      , pAttachmentInitialSampleLocations{ pAttachmentInitialSampleLocations_ }
      , postSubpassSampleLocationsCount{ postSubpassSampleLocationsCount_ }
      , pPostSubpassSampleLocations{ pPostSubpassSampleLocations_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construction from the native struct relies on layout compatibility with this wrapper
    RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassSampleLocationsBeginInfoEXT( *reinterpret_cast<RenderPassSampleLocationsBeginInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // convenience constructor: each count member is derived from the size of the corresponding array proxy
    RenderPassSampleLocationsBeginInfoEXT( ArrayProxyNoTemporaries<const AttachmentSampleLocationsEXT> const & attachmentInitialSampleLocations_,
                                           ArrayProxyNoTemporaries<const SubpassSampleLocationsEXT> const &    postSubpassSampleLocations_ = {},
                                           const void *                                                        pNext_                      = nullptr )
      : pNext( pNext_ )
      , attachmentInitialSampleLocationsCount( static_cast<uint32_t>( attachmentInitialSampleLocations_.size() ) )
      , pAttachmentInitialSampleLocations( attachmentInitialSampleLocations_.data() )
      , postSubpassSampleLocationsCount( static_cast<uint32_t>( postSubpassSampleLocations_.size() ) )
      , pPostSubpassSampleLocations( postSubpassSampleLocations_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RenderPassSampleLocationsBeginInfoEXT & operator=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assignment from the native struct, again via the layout-compatibility assumption
    RenderPassSampleLocationsBeginInfoEXT & operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderPassSampleLocationsBeginInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // all setters return *this to allow call-chaining
    VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT &
      setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentInitialSampleLocationsCount = attachmentInitialSampleLocationsCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT &
      setPAttachmentInitialSampleLocations( const AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT
    {
      pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // sets both attachmentInitialSampleLocationsCount and pAttachmentInitialSampleLocations from a single array proxy
    RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocations(
      ArrayProxyNoTemporaries<const AttachmentSampleLocationsEXT> const & attachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentInitialSampleLocationsCount = static_cast<uint32_t>( attachmentInitialSampleLocations_.size() );
      pAttachmentInitialSampleLocations     = attachmentInitialSampleLocations_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT &
      setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
    {
      postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT &
      setPPostSubpassSampleLocations( const SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
    {
      pPostSubpassSampleLocations = pPostSubpassSampleLocations_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // sets both postSubpassSampleLocationsCount and pPostSubpassSampleLocations from a single array proxy
    RenderPassSampleLocationsBeginInfoEXT &
      setPostSubpassSampleLocations( ArrayProxyNoTemporaries<const SubpassSampleLocationsEXT> const & postSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
    {
      postSubpassSampleLocationsCount = static_cast<uint32_t>( postSubpassSampleLocations_.size() );
      pPostSubpassSampleLocations     = postSubpassSampleLocations_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions that reinterpret this wrapper as the native struct
    operator VkRenderPassSampleLocationsBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassSampleLocationsBeginInfoEXT *>( this );
    }

    operator VkRenderPassSampleLocationsBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassSampleLocationsBeginInfoEXT *>( this );
    }

    operator VkRenderPassSampleLocationsBeginInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderPassSampleLocationsBeginInfoEXT *>( this );
    }

    operator VkRenderPassSampleLocationsBeginInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderPassSampleLocationsBeginInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // returns a tuple of references to all members, used below for comparison
    std::tuple<StructureType const &,
               const void * const &,
               uint32_t const &,
               const AttachmentSampleLocationsEXT * const &,
               uint32_t const &,
               const SubpassSampleLocationsEXT * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(
        sType, pNext, attachmentInitialSampleLocationsCount, pAttachmentInitialSampleLocations, postSubpassSampleLocationsCount, pPostSubpassSampleLocations );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderPassSampleLocationsBeginInfoEXT const & ) const = default;
#else
    // NOTE: pointer members are compared by address, not by the data they point to
    bool operator==( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount ) &&
             ( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations ) &&
             ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount ) && ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations );
#  endif
    }

    bool operator!=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                        sType                                 = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
    const void *                         pNext                                 = {};
    uint32_t                             attachmentInitialSampleLocationsCount = {};
    const AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations     = {};
    uint32_t                             postSubpassSampleLocationsCount       = {};
    const SubpassSampleLocationsEXT *    pPostSubpassSampleLocations           = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C struct type to its C++ wrapper type (only provided for C++20 and newer)
  template <>
  struct CppType<VkRenderPassSampleLocationsBeginInfoEXT>
  {
    using Type = RenderPassSampleLocationsBeginInfoEXT;
  };
#endif

  // maps the StructureType enumerator to its C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::eRenderPassSampleLocationsBeginInfoEXT>
  {
    using Type = RenderPassSampleLocationsBeginInfoEXT;
  };

  // wrapper struct for struct VkRenderPassStripeInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassStripeInfoARM.html
  // The wrapper is treated as layout-compatible with the native struct (see the reinterpret_casts below),
  // so the member order and types below must exactly mirror the C struct.
  struct RenderPassStripeInfoARM
  {
    using NativeType = VkRenderPassStripeInfoARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderPassStripeInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // element-wise constructor; note that pNext_ is the last parameter although pNext is the second member
    VULKAN_HPP_CONSTEXPR RenderPassStripeInfoARM( Rect2D stripeArea_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stripeArea{ stripeArea_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RenderPassStripeInfoARM( RenderPassStripeInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construction from the native struct relies on layout compatibility with this wrapper
    RenderPassStripeInfoARM( VkRenderPassStripeInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassStripeInfoARM( *reinterpret_cast<RenderPassStripeInfoARM const *>( &rhs ) )
    {
    }

    RenderPassStripeInfoARM & operator=( RenderPassStripeInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assignment from the native struct, again via the layout-compatibility assumption
    RenderPassStripeInfoARM & operator=( VkRenderPassStripeInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderPassStripeInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // all setters return *this to allow call-chaining
    VULKAN_HPP_CONSTEXPR_14 RenderPassStripeInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassStripeInfoARM & setStripeArea( Rect2D const & stripeArea_ ) VULKAN_HPP_NOEXCEPT
    {
      stripeArea = stripeArea_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions that reinterpret this wrapper as the native struct
    operator VkRenderPassStripeInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassStripeInfoARM *>( this );
    }

    operator VkRenderPassStripeInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassStripeInfoARM *>( this );
    }

    operator VkRenderPassStripeInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderPassStripeInfoARM *>( this );
    }

    operator VkRenderPassStripeInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderPassStripeInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // returns a tuple of references to all members, used below for comparison
    std::tuple<StructureType const &, const void * const &, Rect2D const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stripeArea );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderPassStripeInfoARM const & ) const = default;
#else
    bool operator==( RenderPassStripeInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stripeArea == rhs.stripeArea );
#  endif
    }

    bool operator!=( RenderPassStripeInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType      = StructureType::eRenderPassStripeInfoARM;
    const void *  pNext      = {};
    Rect2D        stripeArea = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C struct type to its C++ wrapper type (only provided for C++20 and newer)
  template <>
  struct CppType<VkRenderPassStripeInfoARM>
  {
    using Type = RenderPassStripeInfoARM;
  };
#endif

  // maps the StructureType enumerator to its C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::eRenderPassStripeInfoARM>
  {
    using Type = RenderPassStripeInfoARM;
  };

  // wrapper struct for struct VkRenderPassStripeBeginInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassStripeBeginInfoARM.html
  struct RenderPassStripeBeginInfoARM
  {
    // Native C struct this wrapper mirrors member-for-member; the matching layout is what
    // makes the reinterpret_cast-based conversions below well-defined.
    using NativeType = VkRenderPassStripeBeginInfoARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderPassStripeBeginInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its default member initializer below.
    VULKAN_HPP_CONSTEXPR RenderPassStripeBeginInfoARM( uint32_t                        stripeInfoCount_ = {},
                                                       const RenderPassStripeInfoARM * pStripeInfos_    = {},
                                                       const void *                    pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stripeInfoCount{ stripeInfoCount_ }
      , pStripeInfos{ pStripeInfos_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RenderPassStripeBeginInfoARM( RenderPassStripeBeginInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (relies on the layout match noted above).
    RenderPassStripeBeginInfoARM( VkRenderPassStripeBeginInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassStripeBeginInfoARM( *reinterpret_cast<RenderPassStripeBeginInfoARM const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode convenience: stripeInfoCount/pStripeInfos are derived from the array proxy.
    RenderPassStripeBeginInfoARM( ArrayProxyNoTemporaries<const RenderPassStripeInfoARM> const & stripeInfos_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), stripeInfoCount( static_cast<uint32_t>( stripeInfos_.size() ) ), pStripeInfos( stripeInfos_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RenderPassStripeBeginInfoARM & operator=( RenderPassStripeBeginInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    RenderPassStripeBeginInfoARM & operator=( VkRenderPassStripeBeginInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderPassStripeBeginInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 RenderPassStripeBeginInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassStripeBeginInfoARM & setStripeInfoCount( uint32_t stripeInfoCount_ ) VULKAN_HPP_NOEXCEPT
    {
      stripeInfoCount = stripeInfoCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassStripeBeginInfoARM & setPStripeInfos( const RenderPassStripeInfoARM * pStripeInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      pStripeInfos = pStripeInfos_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets count and pointer together from an array proxy.
    RenderPassStripeBeginInfoARM & setStripeInfos( ArrayProxyNoTemporaries<const RenderPassStripeInfoARM> const & stripeInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      stripeInfoCount = static_cast<uint32_t>( stripeInfos_.size() );
      pStripeInfos    = stripeInfos_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit views of this object as the native C type (reference and pointer, const and non-const).
    operator VkRenderPassStripeBeginInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassStripeBeginInfoARM *>( this );
    }

    operator VkRenderPassStripeBeginInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassStripeBeginInfoARM *>( this );
    }

    operator VkRenderPassStripeBeginInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderPassStripeBeginInfoARM *>( this );
    }

    operator VkRenderPassStripeBeginInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderPassStripeBeginInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used by the reflection-based operator== below.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const RenderPassStripeInfoARM * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stripeInfoCount, pStripeInfos );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; otherwise member-wise equality.
    // Note: pointer members (pNext, pStripeInfos) compare by address, not by pointee.
    auto operator<=>( RenderPassStripeBeginInfoARM const & ) const = default;
#else
    bool operator==( RenderPassStripeBeginInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stripeInfoCount == rhs.stripeInfoCount ) && ( pStripeInfos == rhs.pStripeInfos );
#  endif
    }

    bool operator!=( RenderPassStripeBeginInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                   sType           = StructureType::eRenderPassStripeBeginInfoARM;
    const void *                    pNext           = {};
    uint32_t                        stripeInfoCount = {};
    const RenderPassStripeInfoARM * pStripeInfos    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkRenderPassStripeBeginInfoARM to its C++ wrapper, for generic CppType<T> lookups.
  template <>
  struct CppType<VkRenderPassStripeBeginInfoARM>
  {
    using Type = RenderPassStripeBeginInfoARM;
  };
#endif

  // Maps StructureType::eRenderPassStripeBeginInfoARM to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eRenderPassStripeBeginInfoARM>
  {
    using Type = RenderPassStripeBeginInfoARM;
  };

  // wrapper struct for struct VkSemaphoreSubmitInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreSubmitInfo.html
  struct SemaphoreSubmitInfo
  {
    // Native C struct this wrapper mirrors member-for-member; the matching layout is what
    // makes the reinterpret_cast-based conversions below well-defined.
    using NativeType = VkSemaphoreSubmitInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSemaphoreSubmitInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its default member initializer below.
    VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfo( Semaphore           semaphore_   = {},
                                              uint64_t            value_       = {},
                                              PipelineStageFlags2 stageMask_   = {},
                                              uint32_t            deviceIndex_ = {},
                                              const void *        pNext_       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , semaphore{ semaphore_ }
      , value{ value_ }
      , stageMask{ stageMask_ }
      , deviceIndex{ deviceIndex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfo( SemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (relies on the layout match noted above).
    SemaphoreSubmitInfo( VkSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreSubmitInfo( *reinterpret_cast<SemaphoreSubmitInfo const *>( &rhs ) )
    {
    }

    SemaphoreSubmitInfo & operator=( SemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    SemaphoreSubmitInfo & operator=( VkSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SemaphoreSubmitInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setSemaphore( Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphore = semaphore_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
    {
      value = value_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setStageMask( PipelineStageFlags2 stageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      stageMask = stageMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setDeviceIndex( uint32_t deviceIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceIndex = deviceIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit views of this object as the native C type (reference and pointer, const and non-const).
    operator VkSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSemaphoreSubmitInfo *>( this );
    }

    operator VkSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSemaphoreSubmitInfo *>( this );
    }

    operator VkSemaphoreSubmitInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSemaphoreSubmitInfo *>( this );
    }

    operator VkSemaphoreSubmitInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSemaphoreSubmitInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used by the reflection-based operator== below.
    std::tuple<StructureType const &, const void * const &, Semaphore const &, uint64_t const &, PipelineStageFlags2 const &, uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, semaphore, value, stageMask, deviceIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; otherwise member-wise equality.
    auto operator<=>( SemaphoreSubmitInfo const & ) const = default;
#else
    bool operator==( SemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( value == rhs.value ) && ( stageMask == rhs.stageMask ) &&
             ( deviceIndex == rhs.deviceIndex );
#  endif
    }

    bool operator!=( SemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType       sType       = StructureType::eSemaphoreSubmitInfo;
    const void *        pNext       = {};
    Semaphore           semaphore   = {};
    uint64_t            value       = {};
    PipelineStageFlags2 stageMask   = {};
    uint32_t            deviceIndex = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkSemaphoreSubmitInfo to its C++ wrapper, for generic CppType<T> lookups.
  template <>
  struct CppType<VkSemaphoreSubmitInfo>
  {
    using Type = SemaphoreSubmitInfo;
  };
#endif

  // Maps StructureType::eSemaphoreSubmitInfo to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eSemaphoreSubmitInfo>
  {
    using Type = SemaphoreSubmitInfo;
  };

  // KHR-suffixed alias: SemaphoreSubmitInfoKHR and SemaphoreSubmitInfo are the same type.
  using SemaphoreSubmitInfoKHR = SemaphoreSubmitInfo;

  // wrapper struct for struct VkRenderPassStripeSubmitInfoARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassStripeSubmitInfoARM.html
  struct RenderPassStripeSubmitInfoARM
  {
    // Native C struct this wrapper mirrors member-for-member; the matching layout is what
    // makes the reinterpret_cast-based conversions below well-defined.
    using NativeType = VkRenderPassStripeSubmitInfoARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderPassStripeSubmitInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its default member initializer below.
    VULKAN_HPP_CONSTEXPR RenderPassStripeSubmitInfoARM( uint32_t                    stripeSemaphoreInfoCount_ = {},
                                                        const SemaphoreSubmitInfo * pStripeSemaphoreInfos_    = {},
                                                        const void *                pNext_                    = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stripeSemaphoreInfoCount{ stripeSemaphoreInfoCount_ }
      , pStripeSemaphoreInfos{ pStripeSemaphoreInfos_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RenderPassStripeSubmitInfoARM( RenderPassStripeSubmitInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (relies on the layout match noted above).
    RenderPassStripeSubmitInfoARM( VkRenderPassStripeSubmitInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassStripeSubmitInfoARM( *reinterpret_cast<RenderPassStripeSubmitInfoARM const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode convenience: count and pointer are derived from the array proxy.
    RenderPassStripeSubmitInfoARM( ArrayProxyNoTemporaries<const SemaphoreSubmitInfo> const & stripeSemaphoreInfos_, const void * pNext_ = nullptr )
      : pNext( pNext_ )
      , stripeSemaphoreInfoCount( static_cast<uint32_t>( stripeSemaphoreInfos_.size() ) )
      , pStripeSemaphoreInfos( stripeSemaphoreInfos_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RenderPassStripeSubmitInfoARM & operator=( RenderPassStripeSubmitInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    RenderPassStripeSubmitInfoARM & operator=( VkRenderPassStripeSubmitInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderPassStripeSubmitInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 RenderPassStripeSubmitInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassStripeSubmitInfoARM & setStripeSemaphoreInfoCount( uint32_t stripeSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT
    {
      stripeSemaphoreInfoCount = stripeSemaphoreInfoCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassStripeSubmitInfoARM & setPStripeSemaphoreInfos( const SemaphoreSubmitInfo * pStripeSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      pStripeSemaphoreInfos = pStripeSemaphoreInfos_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets count and pointer together from an array proxy.
    RenderPassStripeSubmitInfoARM &
      setStripeSemaphoreInfos( ArrayProxyNoTemporaries<const SemaphoreSubmitInfo> const & stripeSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      stripeSemaphoreInfoCount = static_cast<uint32_t>( stripeSemaphoreInfos_.size() );
      pStripeSemaphoreInfos    = stripeSemaphoreInfos_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit views of this object as the native C type (reference and pointer, const and non-const).
    operator VkRenderPassStripeSubmitInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassStripeSubmitInfoARM *>( this );
    }

    operator VkRenderPassStripeSubmitInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassStripeSubmitInfoARM *>( this );
    }

    operator VkRenderPassStripeSubmitInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderPassStripeSubmitInfoARM *>( this );
    }

    operator VkRenderPassStripeSubmitInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderPassStripeSubmitInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used by the reflection-based operator== below.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const SemaphoreSubmitInfo * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stripeSemaphoreInfoCount, pStripeSemaphoreInfos );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; otherwise member-wise equality.
    // Note: pointer members compare by address, not by pointee.
    auto operator<=>( RenderPassStripeSubmitInfoARM const & ) const = default;
#else
    bool operator==( RenderPassStripeSubmitInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stripeSemaphoreInfoCount == rhs.stripeSemaphoreInfoCount ) &&
             ( pStripeSemaphoreInfos == rhs.pStripeSemaphoreInfos );
#  endif
    }

    bool operator!=( RenderPassStripeSubmitInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType               sType                    = StructureType::eRenderPassStripeSubmitInfoARM;
    const void *                pNext                    = {};
    uint32_t                    stripeSemaphoreInfoCount = {};
    const SemaphoreSubmitInfo * pStripeSemaphoreInfos    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkRenderPassStripeSubmitInfoARM to its C++ wrapper, for generic CppType<T> lookups.
  template <>
  struct CppType<VkRenderPassStripeSubmitInfoARM>
  {
    using Type = RenderPassStripeSubmitInfoARM;
  };
#endif

  // Maps StructureType::eRenderPassStripeSubmitInfoARM to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eRenderPassStripeSubmitInfoARM>
  {
    using Type = RenderPassStripeSubmitInfoARM;
  };

  // wrapper struct for struct VkRenderPassSubpassFeedbackInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassSubpassFeedbackInfoEXT.html
  struct RenderPassSubpassFeedbackInfoEXT
  {
    // Native C struct this wrapper mirrors member-for-member; the matching layout is what
    // makes the reinterpret_cast-based conversions below well-defined.
    // Unlike most Vulkan structs, this one has no sType/pNext members.
    using NativeType = VkRenderPassSubpassFeedbackInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; CONSTEXPR_14 because of the array member initialization.
    VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackInfoEXT( SubpassMergeStatusEXT subpassMergeStatus_                      = SubpassMergeStatusEXT::eMerged,
                                                              std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
                                                              uint32_t                                          postMergeIndex_ = {} ) VULKAN_HPP_NOEXCEPT
      : subpassMergeStatus{ subpassMergeStatus_ }
      , description{ description_ }
      , postMergeIndex{ postMergeIndex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackInfoEXT( RenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (relies on the layout match noted above).
    RenderPassSubpassFeedbackInfoEXT( VkRenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassSubpassFeedbackInfoEXT( *reinterpret_cast<RenderPassSubpassFeedbackInfoEXT const *>( &rhs ) )
    {
    }

    RenderPassSubpassFeedbackInfoEXT & operator=( RenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    RenderPassSubpassFeedbackInfoEXT & operator=( VkRenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderPassSubpassFeedbackInfoEXT const *>( &rhs );
      return *this;
    }

    // Implicit views of this object as the native C type (reference and pointer, const and non-const).
    operator VkRenderPassSubpassFeedbackInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassSubpassFeedbackInfoEXT *>( this );
    }

    operator VkRenderPassSubpassFeedbackInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassSubpassFeedbackInfoEXT *>( this );
    }

    operator VkRenderPassSubpassFeedbackInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderPassSubpassFeedbackInfoEXT *>( this );
    }

    operator VkRenderPassSubpassFeedbackInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderPassSubpassFeedbackInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, for generic/reflection-style access.
    std::tuple<SubpassMergeStatusEXT const &, ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( subpassMergeStatus, description, postMergeIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written three-way comparison: `description` is compared as a NUL-terminated
    // string via strcmp (bytes past the first NUL are ignored), so the operator cannot
    // simply be defaulted.
    std::strong_ordering operator<=>( RenderPassSubpassFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = subpassMergeStatus <=> rhs.subpassMergeStatus; cmp != 0 )
        return cmp;
      if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = postMergeIndex <=> rhs.postMergeIndex; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    // Equality uses the same strcmp semantics for `description` as operator<=> above.
    bool operator==( RenderPassSubpassFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( subpassMergeStatus == rhs.subpassMergeStatus ) && ( strcmp( description, rhs.description ) == 0 ) && ( postMergeIndex == rhs.postMergeIndex );
    }

    bool operator!=( RenderPassSubpassFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    SubpassMergeStatusEXT                         subpassMergeStatus = SubpassMergeStatusEXT::eMerged;
    ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description        = {};
    uint32_t                                      postMergeIndex     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkRenderPassSubpassFeedbackInfoEXT to its C++ wrapper, for generic CppType<T> lookups.
  // No StructureType specialization here: this struct has no sType member.
  template <>
  struct CppType<VkRenderPassSubpassFeedbackInfoEXT>
  {
    using Type = RenderPassSubpassFeedbackInfoEXT;
  };
#endif

  // wrapper struct for struct VkRenderPassSubpassFeedbackCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassSubpassFeedbackCreateInfoEXT.html
  struct RenderPassSubpassFeedbackCreateInfoEXT
  {
    // Native C struct this wrapper mirrors member-for-member; the matching layout is what
    // makes the reinterpret_cast-based conversions below well-defined.
    using NativeType = VkRenderPassSubpassFeedbackCreateInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderPassSubpassFeedbackCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its default member initializer below.
    VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT( RenderPassSubpassFeedbackInfoEXT * pSubpassFeedback_ = {},
                                                                    const void *                       pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pSubpassFeedback{ pSubpassFeedback_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (relies on the layout match noted above).
    RenderPassSubpassFeedbackCreateInfoEXT( VkRenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassSubpassFeedbackCreateInfoEXT( *reinterpret_cast<RenderPassSubpassFeedbackCreateInfoEXT const *>( &rhs ) )
    {
    }

    RenderPassSubpassFeedbackCreateInfoEXT & operator=( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    RenderPassSubpassFeedbackCreateInfoEXT & operator=( VkRenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderPassSubpassFeedbackCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT &
      setPSubpassFeedback( RenderPassSubpassFeedbackInfoEXT * pSubpassFeedback_ ) VULKAN_HPP_NOEXCEPT
    {
      pSubpassFeedback = pSubpassFeedback_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit views of this object as the native C type (reference and pointer, const and non-const).
    operator VkRenderPassSubpassFeedbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassSubpassFeedbackCreateInfoEXT *>( this );
    }

    operator VkRenderPassSubpassFeedbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassSubpassFeedbackCreateInfoEXT *>( this );
    }

    operator VkRenderPassSubpassFeedbackCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderPassSubpassFeedbackCreateInfoEXT *>( this );
    }

    operator VkRenderPassSubpassFeedbackCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderPassSubpassFeedbackCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used by the reflection-based operator== below.
    std::tuple<StructureType const &, const void * const &, RenderPassSubpassFeedbackInfoEXT * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pSubpassFeedback );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; otherwise member-wise equality.
    // Note: pSubpassFeedback compares by address, not by pointee.
    auto operator<=>( RenderPassSubpassFeedbackCreateInfoEXT const & ) const = default;
#else
    bool operator==( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pSubpassFeedback == rhs.pSubpassFeedback );
#  endif
    }

    bool operator!=( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                      sType            = StructureType::eRenderPassSubpassFeedbackCreateInfoEXT;
    const void *                       pNext            = {};
    RenderPassSubpassFeedbackInfoEXT * pSubpassFeedback = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkRenderPassSubpassFeedbackCreateInfoEXT to its C++ wrapper, for generic CppType<T> lookups.
  template <>
  struct CppType<VkRenderPassSubpassFeedbackCreateInfoEXT>
  {
    using Type = RenderPassSubpassFeedbackCreateInfoEXT;
  };
#endif

  // Maps StructureType::eRenderPassSubpassFeedbackCreateInfoEXT to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eRenderPassSubpassFeedbackCreateInfoEXT>
  {
    using Type = RenderPassSubpassFeedbackCreateInfoEXT;
  };

  // wrapper struct for struct VkRenderPassTileShadingCreateInfoQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassTileShadingCreateInfoQCOM.html
  struct RenderPassTileShadingCreateInfoQCOM
  {
    // Native C struct this wrapper mirrors member-for-member; the matching layout is what
    // makes the reinterpret_cast-based conversions below well-defined.
    using NativeType = VkRenderPassTileShadingCreateInfoQCOM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderPassTileShadingCreateInfoQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its default member initializer below.
    VULKAN_HPP_CONSTEXPR RenderPassTileShadingCreateInfoQCOM( TileShadingRenderPassFlagsQCOM flags_         = {},
                                                              Extent2D                       tileApronSize_ = {},
                                                              const void *                   pNext_         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , tileApronSize{ tileApronSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RenderPassTileShadingCreateInfoQCOM( RenderPassTileShadingCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (relies on the layout match noted above).
    RenderPassTileShadingCreateInfoQCOM( VkRenderPassTileShadingCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassTileShadingCreateInfoQCOM( *reinterpret_cast<RenderPassTileShadingCreateInfoQCOM const *>( &rhs ) )
    {
    }

    RenderPassTileShadingCreateInfoQCOM & operator=( RenderPassTileShadingCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    RenderPassTileShadingCreateInfoQCOM & operator=( VkRenderPassTileShadingCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderPassTileShadingCreateInfoQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 RenderPassTileShadingCreateInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassTileShadingCreateInfoQCOM & setFlags( TileShadingRenderPassFlagsQCOM flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassTileShadingCreateInfoQCOM & setTileApronSize( Extent2D const & tileApronSize_ ) VULKAN_HPP_NOEXCEPT
    {
      tileApronSize = tileApronSize_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit views of this object as the native C type (reference and pointer, const and non-const).
    operator VkRenderPassTileShadingCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassTileShadingCreateInfoQCOM *>( this );
    }

    operator VkRenderPassTileShadingCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassTileShadingCreateInfoQCOM *>( this );
    }

    operator VkRenderPassTileShadingCreateInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderPassTileShadingCreateInfoQCOM *>( this );
    }

    operator VkRenderPassTileShadingCreateInfoQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderPassTileShadingCreateInfoQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used by the reflection-based operator== below.
    std::tuple<StructureType const &, const void * const &, TileShadingRenderPassFlagsQCOM const &, Extent2D const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, tileApronSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; otherwise member-wise equality.
    auto operator<=>( RenderPassTileShadingCreateInfoQCOM const & ) const = default;
#else
    bool operator==( RenderPassTileShadingCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( tileApronSize == rhs.tileApronSize );
#  endif
    }

    bool operator!=( RenderPassTileShadingCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                  sType         = StructureType::eRenderPassTileShadingCreateInfoQCOM;
    const void *                   pNext         = {};
    TileShadingRenderPassFlagsQCOM flags         = {};
    Extent2D                       tileApronSize = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkRenderPassTileShadingCreateInfoQCOM to its C++ wrapper, for generic CppType<T> lookups.
  template <>
  struct CppType<VkRenderPassTileShadingCreateInfoQCOM>
  {
    using Type = RenderPassTileShadingCreateInfoQCOM;
  };
#endif

  // Maps StructureType::eRenderPassTileShadingCreateInfoQCOM to the corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eRenderPassTileShadingCreateInfoQCOM>
  {
    using Type = RenderPassTileShadingCreateInfoQCOM;
  };

  // wrapper struct for struct VkRenderPassTransformBeginInfoQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassTransformBeginInfoQCOM.html
  struct RenderPassTransformBeginInfoQCOM
  {
    // Native C struct this wrapper mirrors member-for-member; the matching layout is what
    // makes the reinterpret_cast-based conversions below well-defined.
    using NativeType = VkRenderPassTransformBeginInfoQCOM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderPassTransformBeginInfoQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its default member initializer below,
    // and transform defaults to the identity transform.
    VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM( SurfaceTransformFlagBitsKHR transform_ = SurfaceTransformFlagBitsKHR::eIdentity,
                                                           const void *                pNext_     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , transform{ transform_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct (relies on the layout match noted above).
    RenderPassTransformBeginInfoQCOM( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassTransformBeginInfoQCOM( *reinterpret_cast<RenderPassTransformBeginInfoQCOM const *>( &rhs ) )
    {
    }

    RenderPassTransformBeginInfoQCOM & operator=( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    RenderPassTransformBeginInfoQCOM & operator=( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderPassTransformBeginInfoQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM & setTransform( SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
    {
      transform = transform_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit views of this object as the native C type (reference and pointer, const and non-const).
    operator VkRenderPassTransformBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassTransformBeginInfoQCOM *>( this );
    }

    operator VkRenderPassTransformBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassTransformBeginInfoQCOM *>( this );
    }

    operator VkRenderPassTransformBeginInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderPassTransformBeginInfoQCOM *>( this );
    }

    operator VkRenderPassTransformBeginInfoQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderPassTransformBeginInfoQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used by the reflection-based operator== below.
    std::tuple<StructureType const &, const void * const &, SurfaceTransformFlagBitsKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, transform );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; otherwise member-wise equality.
    auto operator<=>( RenderPassTransformBeginInfoQCOM const & ) const = default;
#else
    bool operator==( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform );
#  endif
    }

    bool operator!=( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType               sType     = StructureType::eRenderPassTransformBeginInfoQCOM;
    const void *                pNext     = {};
    SurfaceTransformFlagBitsKHR transform = SurfaceTransformFlagBitsKHR::eIdentity;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper (only provided for C++20 and newer).
  template <>
  struct CppType<VkRenderPassTransformBeginInfoQCOM>
  {
    using Type = RenderPassTransformBeginInfoQCOM;
  };
#endif

  // Maps the sType enumerant to the wrapper type, e.g. for structure-chain handling.
  template <>
  struct CppType<StructureType, StructureType::eRenderPassTransformBeginInfoQCOM>
  {
    using Type = RenderPassTransformBeginInfoQCOM;
  };

  // wrapper struct for struct VkRenderingAreaInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingAreaInfo.html
  struct RenderingAreaInfo
  {
    using NativeType = VkRenderingAreaInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderingAreaInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext comes last so the Vulkan payload members can be passed positionally.
    VULKAN_HPP_CONSTEXPR RenderingAreaInfo( uint32_t       viewMask_                = {},
                                            uint32_t       colorAttachmentCount_    = {},
                                            const Format * pColorAttachmentFormats_ = {},
                                            Format         depthAttachmentFormat_   = Format::eUndefined,
                                            Format         stencilAttachmentFormat_ = Format::eUndefined,
                                            const void *   pNext_                   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , viewMask{ viewMask_ }
      , colorAttachmentCount{ colorAttachmentCount_ }
      , pColorAttachmentFormats{ pColorAttachmentFormats_ }
      , depthAttachmentFormat{ depthAttachmentFormat_ }
      , stencilAttachmentFormat{ stencilAttachmentFormat_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RenderingAreaInfo( RenderingAreaInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because this wrapper is layout-compatible with VkRenderingAreaInfo.
    RenderingAreaInfo( VkRenderingAreaInfo const & rhs ) VULKAN_HPP_NOEXCEPT : RenderingAreaInfo( *reinterpret_cast<RenderingAreaInfo const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: colorAttachmentCount and pColorAttachmentFormats are derived from the array
    // proxy. The proxy is non-owning, so the caller must keep the formats alive while this struct is in use.
    RenderingAreaInfo( uint32_t                                      viewMask_,
                       ArrayProxyNoTemporaries<const Format> const & colorAttachmentFormats_,
                       Format                                        depthAttachmentFormat_   = Format::eUndefined,
                       Format                                        stencilAttachmentFormat_ = Format::eUndefined,
                       const void *                                  pNext_                   = nullptr )
      : pNext( pNext_ )
      , viewMask( viewMask_ )
      , colorAttachmentCount( static_cast<uint32_t>( colorAttachmentFormats_.size() ) )
      , pColorAttachmentFormats( colorAttachmentFormats_.data() )
      , depthAttachmentFormat( depthAttachmentFormat_ )
      , stencilAttachmentFormat( stencilAttachmentFormat_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RenderingAreaInfo & operator=( RenderingAreaInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, again via the layout-compatible reinterpret_cast.
    RenderingAreaInfo & operator=( VkRenderingAreaInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderingAreaInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
    {
      viewMask = viewMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount = colorAttachmentCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setPColorAttachmentFormats( const Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
    {
      pColorAttachmentFormats = pColorAttachmentFormats_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both count and pointer from the array proxy in one call; the proxy is non-owning (see ctor note).
    RenderingAreaInfo & setColorAttachmentFormats( ArrayProxyNoTemporaries<const Format> const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount    = static_cast<uint32_t>( colorAttachmentFormats_.size() );
      pColorAttachmentFormats = colorAttachmentFormats_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setDepthAttachmentFormat( Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      depthAttachmentFormat = depthAttachmentFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setStencilAttachmentFormat( Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilAttachmentFormat = stencilAttachmentFormat_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions so the wrapper can be passed directly to the C API.
    operator VkRenderingAreaInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderingAreaInfo *>( this );
    }

    operator VkRenderingAreaInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderingAreaInfo *>( this );
    }

    operator VkRenderingAreaInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderingAreaInfo *>( this );
    }

    operator VkRenderingAreaInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderingAreaInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const Format * const &, Format const &, Format const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderingAreaInfo const & ) const = default;
#else
    // NOTE: pColorAttachmentFormats is compared by pointer value, not by the pointed-to formats.
    bool operator==( RenderingAreaInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewMask == rhs.viewMask ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) &&
             ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) && ( depthAttachmentFormat == rhs.depthAttachmentFormat ) &&
             ( stencilAttachmentFormat == rhs.stencilAttachmentFormat );
#  endif
    }

    bool operator!=( RenderingAreaInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType  sType                   = StructureType::eRenderingAreaInfo;
    const void *   pNext                   = {};
    uint32_t       viewMask                = {};
    uint32_t       colorAttachmentCount    = {};
    const Format * pColorAttachmentFormats = {};
    Format         depthAttachmentFormat   = Format::eUndefined;
    Format         stencilAttachmentFormat = Format::eUndefined;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper (only provided for C++20 and newer).
  template <>
  struct CppType<VkRenderingAreaInfo>
  {
    using Type = RenderingAreaInfo;
  };
#endif

  // Maps the sType enumerant to the wrapper type, e.g. for structure-chain handling.
  template <>
  struct CppType<StructureType, StructureType::eRenderingAreaInfo>
  {
    using Type = RenderingAreaInfo;
  };

  // Alias kept for code written against the original KHR extension name.
  using RenderingAreaInfoKHR = RenderingAreaInfo;

  // wrapper struct for struct VkRenderingAttachmentFlagsInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingAttachmentFlagsInfoKHR.html
  struct RenderingAttachmentFlagsInfoKHR
  {
    using NativeType = VkRenderingAttachmentFlagsInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderingAttachmentFlagsInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; initializer order matches the member declaration order below.
    VULKAN_HPP_CONSTEXPR RenderingAttachmentFlagsInfoKHR( RenderingAttachmentFlagsKHR flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , flags( flags_ )
    {
    }

    VULKAN_HPP_CONSTEXPR RenderingAttachmentFlagsInfoKHR( RenderingAttachmentFlagsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because this wrapper is layout-compatible with it.
    RenderingAttachmentFlagsInfoKHR( VkRenderingAttachmentFlagsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderingAttachmentFlagsInfoKHR( *reinterpret_cast<RenderingAttachmentFlagsInfoKHR const *>( &rhs ) )
    {
    }

    RenderingAttachmentFlagsInfoKHR & operator=( RenderingAttachmentFlagsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct via the layout-compatible wrapper view.
    RenderingAttachmentFlagsInfoKHR & operator=( VkRenderingAttachmentFlagsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      return *this = *reinterpret_cast<RenderingAttachmentFlagsInfoKHR const *>( &rhs );
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters.
    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentFlagsInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentFlagsInfoKHR & setFlags( RenderingAttachmentFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      this->flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions so the wrapper can be handed straight to the C API.
    operator VkRenderingAttachmentFlagsInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderingAttachmentFlagsInfoKHR *>( this );
    }

    operator VkRenderingAttachmentFlagsInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderingAttachmentFlagsInfoKHR *>( this );
    }

    operator VkRenderingAttachmentFlagsInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderingAttachmentFlagsInfoKHR *>( this );
    }

    operator VkRenderingAttachmentFlagsInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderingAttachmentFlagsInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, RenderingAttachmentFlagsKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderingAttachmentFlagsInfoKHR const & ) const = default;
#else
    bool operator==( RenderingAttachmentFlagsInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      if ( sType != rhs.sType )
      {
        return false;
      }
      if ( pNext != rhs.pNext )
      {
        return false;
      }
      return flags == rhs.flags;
#  endif
    }

    bool operator!=( RenderingAttachmentFlagsInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType               sType = StructureType::eRenderingAttachmentFlagsInfoKHR;
    const void *                pNext = {};
    RenderingAttachmentFlagsKHR flags = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper (only provided for C++20 and newer).
  template <>
  struct CppType<VkRenderingAttachmentFlagsInfoKHR>
  {
    using Type = RenderingAttachmentFlagsInfoKHR;
  };
#endif

  // Maps the sType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eRenderingAttachmentFlagsInfoKHR>
  {
    using Type = RenderingAttachmentFlagsInfoKHR;
  };

  // wrapper struct for struct VkRenderingAttachmentInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingAttachmentInfo.html
  struct RenderingAttachmentInfo
  {
    using NativeType = VkRenderingAttachmentInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderingAttachmentInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext comes last so the Vulkan payload members can be passed positionally.
    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo( ImageView           imageView_          = {},
                                                     ImageLayout         imageLayout_        = ImageLayout::eUndefined,
                                                     ResolveModeFlagBits resolveMode_        = ResolveModeFlagBits::eNone,
                                                     ImageView           resolveImageView_   = {},
                                                     ImageLayout         resolveImageLayout_ = ImageLayout::eUndefined,
                                                     AttachmentLoadOp    loadOp_             = AttachmentLoadOp::eLoad,
                                                     AttachmentStoreOp   storeOp_            = AttachmentStoreOp::eStore,
                                                     ClearValue          clearValue_         = {},
                                                     const void *        pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , imageView{ imageView_ }
      , imageLayout{ imageLayout_ }
      , resolveMode{ resolveMode_ }
      , resolveImageView{ resolveImageView_ }
      , resolveImageLayout{ resolveImageLayout_ }
      , loadOp{ loadOp_ }
      , storeOp{ storeOp_ }
      , clearValue{ clearValue_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo( RenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because this wrapper is layout-compatible with VkRenderingAttachmentInfo.
    RenderingAttachmentInfo( VkRenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderingAttachmentInfo( *reinterpret_cast<RenderingAttachmentInfo const *>( &rhs ) )
    {
    }

    RenderingAttachmentInfo & operator=( RenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, again via the layout-compatible reinterpret_cast.
    RenderingAttachmentInfo & operator=( VkRenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderingAttachmentInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setImageView( ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
    {
      imageView = imageView_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setImageLayout( ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      imageLayout = imageLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveMode( ResolveModeFlagBits resolveMode_ ) VULKAN_HPP_NOEXCEPT
    {
      resolveMode = resolveMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveImageView( ImageView resolveImageView_ ) VULKAN_HPP_NOEXCEPT
    {
      resolveImageView = resolveImageView_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveImageLayout( ImageLayout resolveImageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      resolveImageLayout = resolveImageLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setLoadOp( AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
    {
      loadOp = loadOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setStoreOp( AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
    {
      storeOp = storeOp_;
      return *this;
    }

    // clearValue is taken by const reference (unlike the scalar setters) since it is a larger aggregate.
    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setClearValue( ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT
    {
      clearValue = clearValue_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions so the wrapper can be passed directly to the C API.
    operator VkRenderingAttachmentInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderingAttachmentInfo *>( this );
    }

    operator VkRenderingAttachmentInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderingAttachmentInfo *>( this );
    }

    operator VkRenderingAttachmentInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderingAttachmentInfo *>( this );
    }

    operator VkRenderingAttachmentInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderingAttachmentInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               ImageView const &,
               ImageLayout const &,
               ResolveModeFlagBits const &,
               ImageView const &,
               ImageLayout const &,
               AttachmentLoadOp const &,
               AttachmentStoreOp const &,
               ClearValue const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, imageView, imageLayout, resolveMode, resolveImageView, resolveImageLayout, loadOp, storeOp, clearValue );
    }
#endif

    // NOTE(review): unlike sibling structs, no operator==/!=/<=> is provided here — presumably because the
    // clearValue member has no meaningful comparison; confirm against the code generator before adding one.

  public:
    StructureType       sType              = StructureType::eRenderingAttachmentInfo;
    const void *        pNext              = {};
    ImageView           imageView          = {};
    ImageLayout         imageLayout        = ImageLayout::eUndefined;
    ResolveModeFlagBits resolveMode        = ResolveModeFlagBits::eNone;
    ImageView           resolveImageView   = {};
    ImageLayout         resolveImageLayout = ImageLayout::eUndefined;
    AttachmentLoadOp    loadOp             = AttachmentLoadOp::eLoad;
    AttachmentStoreOp   storeOp            = AttachmentStoreOp::eStore;
    ClearValue          clearValue         = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper (only provided for C++20 and newer).
  template <>
  struct CppType<VkRenderingAttachmentInfo>
  {
    using Type = RenderingAttachmentInfo;
  };
#endif

  // Maps the sType enumerant to the wrapper type, e.g. for structure-chain handling.
  template <>
  struct CppType<StructureType, StructureType::eRenderingAttachmentInfo>
  {
    using Type = RenderingAttachmentInfo;
  };

  // Alias kept for code written against the original KHR extension name.
  using RenderingAttachmentInfoKHR = RenderingAttachmentInfo;

  // wrapper struct for struct VkRenderingAttachmentLocationInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingAttachmentLocationInfo.html
  struct RenderingAttachmentLocationInfo
  {
    using NativeType = VkRenderingAttachmentLocationInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderingAttachmentLocationInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext comes last so the Vulkan payload members can be passed positionally.
    VULKAN_HPP_CONSTEXPR RenderingAttachmentLocationInfo( uint32_t         colorAttachmentCount_      = {},
                                                          const uint32_t * pColorAttachmentLocations_ = {},
                                                          const void *     pNext_                     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , colorAttachmentCount{ colorAttachmentCount_ }
      , pColorAttachmentLocations{ pColorAttachmentLocations_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RenderingAttachmentLocationInfo( RenderingAttachmentLocationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because this wrapper is layout-compatible with it.
    RenderingAttachmentLocationInfo( VkRenderingAttachmentLocationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderingAttachmentLocationInfo( *reinterpret_cast<RenderingAttachmentLocationInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: count and pointer are derived from the array proxy. The proxy is
    // non-owning; the caller must keep the locations alive while this struct is in use.
    RenderingAttachmentLocationInfo( ArrayProxyNoTemporaries<const uint32_t> const & colorAttachmentLocations_, const void * pNext_ = nullptr )
      : pNext( pNext_ )
      , colorAttachmentCount( static_cast<uint32_t>( colorAttachmentLocations_.size() ) )
      , pColorAttachmentLocations( colorAttachmentLocations_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RenderingAttachmentLocationInfo & operator=( RenderingAttachmentLocationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, again via the layout-compatible reinterpret_cast.
    RenderingAttachmentLocationInfo & operator=( VkRenderingAttachmentLocationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderingAttachmentLocationInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentLocationInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentLocationInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount = colorAttachmentCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentLocationInfo & setPColorAttachmentLocations( const uint32_t * pColorAttachmentLocations_ ) VULKAN_HPP_NOEXCEPT
    {
      pColorAttachmentLocations = pColorAttachmentLocations_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both count and pointer from the array proxy in one call; the proxy is non-owning (see ctor note).
    RenderingAttachmentLocationInfo &
      setColorAttachmentLocations( ArrayProxyNoTemporaries<const uint32_t> const & colorAttachmentLocations_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount      = static_cast<uint32_t>( colorAttachmentLocations_.size() );
      pColorAttachmentLocations = colorAttachmentLocations_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions so the wrapper can be passed directly to the C API.
    operator VkRenderingAttachmentLocationInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderingAttachmentLocationInfo *>( this );
    }

    operator VkRenderingAttachmentLocationInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderingAttachmentLocationInfo *>( this );
    }

    operator VkRenderingAttachmentLocationInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderingAttachmentLocationInfo *>( this );
    }

    operator VkRenderingAttachmentLocationInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderingAttachmentLocationInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, colorAttachmentCount, pColorAttachmentLocations );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderingAttachmentLocationInfo const & ) const = default;
#else
    // NOTE: pColorAttachmentLocations is compared by pointer value, not by the pointed-to locations.
    bool operator==( RenderingAttachmentLocationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) &&
             ( pColorAttachmentLocations == rhs.pColorAttachmentLocations );
#  endif
    }

    bool operator!=( RenderingAttachmentLocationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType    sType                     = StructureType::eRenderingAttachmentLocationInfo;
    const void *     pNext                     = {};
    uint32_t         colorAttachmentCount      = {};
    const uint32_t * pColorAttachmentLocations = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper (only provided for C++20 and newer).
  template <>
  struct CppType<VkRenderingAttachmentLocationInfo>
  {
    using Type = RenderingAttachmentLocationInfo;
  };
#endif

  // Maps the sType enumerant to the wrapper type, e.g. for structure-chain handling.
  template <>
  struct CppType<StructureType, StructureType::eRenderingAttachmentLocationInfo>
  {
    using Type = RenderingAttachmentLocationInfo;
  };

  // Alias kept for code written against the original KHR extension name.
  using RenderingAttachmentLocationInfoKHR = RenderingAttachmentLocationInfo;

  // wrapper struct for struct VkRenderingEndInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingEndInfoKHR.html
  struct RenderingEndInfoKHR
  {
    using NativeType = VkRenderingEndInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderingEndInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // This struct carries no payload beyond the extension-chain pointer.
    VULKAN_HPP_CONSTEXPR RenderingEndInfoKHR( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext( pNext_ ) {}

    VULKAN_HPP_CONSTEXPR RenderingEndInfoKHR( RenderingEndInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because this wrapper is layout-compatible with it.
    RenderingEndInfoKHR( VkRenderingEndInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderingEndInfoKHR( *reinterpret_cast<RenderingEndInfoKHR const *>( &rhs ) )
    {
    }

    RenderingEndInfoKHR & operator=( RenderingEndInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct via the layout-compatible wrapper view.
    RenderingEndInfoKHR & operator=( VkRenderingEndInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      return *this = *reinterpret_cast<RenderingEndInfoKHR const *>( &rhs );
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setter.
    VULKAN_HPP_CONSTEXPR_14 RenderingEndInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions so the wrapper can be handed straight to the C API.
    operator VkRenderingEndInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderingEndInfoKHR *>( this );
    }

    operator VkRenderingEndInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderingEndInfoKHR *>( this );
    }

    operator VkRenderingEndInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderingEndInfoKHR *>( this );
    }

    operator VkRenderingEndInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderingEndInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, in declaration order.
    std::tuple<StructureType const &, const void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderingEndInfoKHR const & ) const = default;
#else
    bool operator==( RenderingEndInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      if ( sType != rhs.sType )
      {
        return false;
      }
      return pNext == rhs.pNext;
#  endif
    }

    bool operator!=( RenderingEndInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType = StructureType::eRenderingEndInfoKHR;
    const void *  pNext = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper (only provided for C++20 and newer).
  template <>
  struct CppType<VkRenderingEndInfoKHR>
  {
    using Type = RenderingEndInfoKHR;
  };
#endif

  // Maps the sType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eRenderingEndInfoKHR>
  {
    using Type = RenderingEndInfoKHR;
  };

  // Alias kept for code written against the EXT extension name.
  using RenderingEndInfoEXT = RenderingEndInfoKHR;

  // wrapper struct for struct VkRenderingFragmentDensityMapAttachmentInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingFragmentDensityMapAttachmentInfoEXT.html
  struct RenderingFragmentDensityMapAttachmentInfoEXT
  {
    using NativeType = VkRenderingFragmentDensityMapAttachmentInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR RenderingFragmentDensityMapAttachmentInfoEXT( ImageView    imageView_   = {},
                                                                       ImageLayout  imageLayout_ = ImageLayout::eUndefined,
                                                                       const void * pNext_       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , imageView{ imageView_ }
      , imageLayout{ imageLayout_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RenderingFragmentDensityMapAttachmentInfoEXT( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RenderingFragmentDensityMapAttachmentInfoEXT( VkRenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderingFragmentDensityMapAttachmentInfoEXT( *reinterpret_cast<RenderingFragmentDensityMapAttachmentInfoEXT const *>( &rhs ) )
    {
    }

    RenderingFragmentDensityMapAttachmentInfoEXT & operator=( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    RenderingFragmentDensityMapAttachmentInfoEXT & operator=( VkRenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderingFragmentDensityMapAttachmentInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setImageView( ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
    {
      imageView = imageView_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setImageLayout( ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      imageLayout = imageLayout_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkRenderingFragmentDensityMapAttachmentInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderingFragmentDensityMapAttachmentInfoEXT *>( this );
    }

    operator VkRenderingFragmentDensityMapAttachmentInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderingFragmentDensityMapAttachmentInfoEXT *>( this );
    }

    operator VkRenderingFragmentDensityMapAttachmentInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderingFragmentDensityMapAttachmentInfoEXT *>( this );
    }

    operator VkRenderingFragmentDensityMapAttachmentInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderingFragmentDensityMapAttachmentInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, ImageView const &, ImageLayout const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, imageView, imageLayout );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderingFragmentDensityMapAttachmentInfoEXT const & ) const = default;
#else
    bool operator==( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) && ( imageLayout == rhs.imageLayout );
#  endif
    }

    bool operator!=( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType       = StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT;
    const void *  pNext       = {};
    ImageView     imageView   = {};
    ImageLayout   imageLayout = ImageLayout::eUndefined;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (enabled only when compiling as C++20 or newer).
  template <>
  struct CppType<VkRenderingFragmentDensityMapAttachmentInfoEXT>
  {
    using Type = RenderingFragmentDensityMapAttachmentInfoEXT;
  };
#endif

  // Maps the sType enumerant back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT>
  {
    using Type = RenderingFragmentDensityMapAttachmentInfoEXT;
  };

  // wrapper struct for struct VkRenderingFragmentShadingRateAttachmentInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingFragmentShadingRateAttachmentInfoKHR.html
  //
  // NOTE(review): generated wrapper — member order and types must remain layout-compatible with the native
  // C struct, since the conversion operators below reinterpret_cast *this to the Vk type.
  struct RenderingFragmentShadingRateAttachmentInfoKHR
  {
    using NativeType = VkRenderingFragmentShadingRateAttachmentInfoKHR;

    // Compile-time traits: the sType this struct carries, and whether it may appear more than once in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed via its in-class initializer below.
    VULKAN_HPP_CONSTEXPR RenderingFragmentShadingRateAttachmentInfoKHR( ImageView    imageView_                      = {},
                                                                        ImageLayout  imageLayout_                    = ImageLayout::eUndefined,
                                                                        Extent2D     shadingRateAttachmentTexelSize_ = {},
                                                                        const void * pNext_                          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , imageView{ imageView_ }
      , imageLayout{ imageLayout_ }
      , shadingRateAttachmentTexelSize{ shadingRateAttachmentTexelSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      RenderingFragmentShadingRateAttachmentInfoKHR( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct — relies on the wrapper being layout-identical.
    RenderingFragmentShadingRateAttachmentInfoKHR( VkRenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderingFragmentShadingRateAttachmentInfoKHR( *reinterpret_cast<RenderingFragmentShadingRateAttachmentInfoKHR const *>( &rhs ) )
    {
    }

    RenderingFragmentShadingRateAttachmentInfoKHR & operator=( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    RenderingFragmentShadingRateAttachmentInfoKHR & operator=( VkRenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderingFragmentShadingRateAttachmentInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setImageView( ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
    {
      imageView = imageView_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setImageLayout( ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      imageLayout = imageLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR &
      setShadingRateAttachmentTexelSize( Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT
    {
      shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C struct (reference and pointer, const and non-const).
    operator VkRenderingFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderingFragmentShadingRateAttachmentInfoKHR *>( this );
    }

    operator VkRenderingFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderingFragmentShadingRateAttachmentInfoKHR *>( this );
    }

    operator VkRenderingFragmentShadingRateAttachmentInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderingFragmentShadingRateAttachmentInfoKHR *>( this );
    }

    operator VkRenderingFragmentShadingRateAttachmentInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderingFragmentShadingRateAttachmentInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, ImageView const &, ImageLayout const &, Extent2D const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, imageView, imageLayout, shadingRateAttachmentTexelSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderingFragmentShadingRateAttachmentInfoKHR const & ) const = default;
#else
    // Member-wise equality; note pNext is compared as a raw pointer (shallow comparison).
    bool operator==( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) && ( imageLayout == rhs.imageLayout ) &&
             ( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize );
#  endif
    }

    bool operator!=( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native C struct, in the same order.
    StructureType sType                          = StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR;
    const void *  pNext                          = {};
    ImageView     imageView                      = {};
    ImageLayout   imageLayout                    = ImageLayout::eUndefined;
    Extent2D      shadingRateAttachmentTexelSize = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkRenderingFragmentShadingRateAttachmentInfoKHR>
  {
    using Type = RenderingFragmentShadingRateAttachmentInfoKHR;
  };
#endif

  // Maps the sType enumerant back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR>
  {
    using Type = RenderingFragmentShadingRateAttachmentInfoKHR;
  };

  // wrapper struct for struct VkRenderingInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingInfo.html
  //
  // NOTE(review): generated wrapper — member order and types must remain layout-compatible with VkRenderingInfo,
  // since the conversion operators below reinterpret_cast *this to the Vk type.
  struct RenderingInfo
  {
    using NativeType = VkRenderingInfo;

    // Compile-time traits: the sType this struct carries, and whether it may appear more than once in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderingInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed via its in-class initializer below.
    VULKAN_HPP_CONSTEXPR_14 RenderingInfo( RenderingFlags                  flags_                = {},
                                           Rect2D                          renderArea_           = {},
                                           uint32_t                        layerCount_           = {},
                                           uint32_t                        viewMask_             = {},
                                           uint32_t                        colorAttachmentCount_ = {},
                                           const RenderingAttachmentInfo * pColorAttachments_    = {},
                                           const RenderingAttachmentInfo * pDepthAttachment_     = {},
                                           const RenderingAttachmentInfo * pStencilAttachment_   = {},
                                           const void *                    pNext_                = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , renderArea{ renderArea_ }
      , layerCount{ layerCount_ }
      , viewMask{ viewMask_ }
      , colorAttachmentCount{ colorAttachmentCount_ }
      , pColorAttachments{ pColorAttachments_ }
      , pDepthAttachment{ pDepthAttachment_ }
      , pStencilAttachment{ pStencilAttachment_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingInfo( RenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct — relies on the wrapper being layout-identical.
    RenderingInfo( VkRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT : RenderingInfo( *reinterpret_cast<RenderingInfo const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: takes the color attachments as an array proxy and derives
    // colorAttachmentCount/pColorAttachments from it, keeping count and pointer in sync.
    // The proxy does not own its data; the caller must keep the array alive while this struct is in use.
    RenderingInfo( RenderingFlags                                                 flags_,
                   Rect2D                                                         renderArea_,
                   uint32_t                                                       layerCount_,
                   uint32_t                                                       viewMask_,
                   ArrayProxyNoTemporaries<const RenderingAttachmentInfo> const & colorAttachments_,
                   const RenderingAttachmentInfo *                                pDepthAttachment_   = {},
                   const RenderingAttachmentInfo *                                pStencilAttachment_ = {},
                   const void *                                                   pNext_              = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , renderArea( renderArea_ )
      , layerCount( layerCount_ )
      , viewMask( viewMask_ )
      , colorAttachmentCount( static_cast<uint32_t>( colorAttachments_.size() ) )
      , pColorAttachments( colorAttachments_.data() )
      , pDepthAttachment( pDepthAttachment_ )
      , pStencilAttachment( pStencilAttachment_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RenderingInfo & operator=( RenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    RenderingInfo & operator=( VkRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderingInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setFlags( RenderingFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setRenderArea( Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
    {
      renderArea = renderArea_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
    {
      layerCount = layerCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
    {
      viewMask = viewMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount = colorAttachmentCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPColorAttachments( const RenderingAttachmentInfo * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pColorAttachments = pColorAttachments_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets count and pointer together from one array proxy (non-owning).
    RenderingInfo & setColorAttachments( ArrayProxyNoTemporaries<const RenderingAttachmentInfo> const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
      pColorAttachments    = colorAttachments_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPDepthAttachment( const RenderingAttachmentInfo * pDepthAttachment_ ) VULKAN_HPP_NOEXCEPT
    {
      pDepthAttachment = pDepthAttachment_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPStencilAttachment( const RenderingAttachmentInfo * pStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
    {
      pStencilAttachment = pStencilAttachment_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C struct (reference and pointer, const and non-const).
    operator VkRenderingInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderingInfo *>( this );
    }

    operator VkRenderingInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderingInfo *>( this );
    }

    operator VkRenderingInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderingInfo *>( this );
    }

    operator VkRenderingInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderingInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               RenderingFlags const &,
               Rect2D const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               const RenderingAttachmentInfo * const &,
               const RenderingAttachmentInfo * const &,
               const RenderingAttachmentInfo * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, renderArea, layerCount, viewMask, colorAttachmentCount, pColorAttachments, pDepthAttachment, pStencilAttachment );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderingInfo const & ) const = default;
#else
    // Member-wise equality; attachment arrays are compared by pointer, not by pointed-to contents.
    bool operator==( RenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( renderArea == rhs.renderArea ) &&
             ( layerCount == rhs.layerCount ) && ( viewMask == rhs.viewMask ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) &&
             ( pColorAttachments == rhs.pColorAttachments ) && ( pDepthAttachment == rhs.pDepthAttachment ) && ( pStencilAttachment == rhs.pStencilAttachment );
#  endif
    }

    bool operator!=( RenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native C struct, in the same order.
    StructureType                   sType                = StructureType::eRenderingInfo;
    const void *                    pNext                = {};
    RenderingFlags                  flags                = {};
    Rect2D                          renderArea           = {};
    uint32_t                        layerCount           = {};
    uint32_t                        viewMask             = {};
    uint32_t                        colorAttachmentCount = {};
    const RenderingAttachmentInfo * pColorAttachments    = {};
    const RenderingAttachmentInfo * pDepthAttachment     = {};
    const RenderingAttachmentInfo * pStencilAttachment   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkRenderingInfo>
  {
    using Type = RenderingInfo;
  };
#endif

  // Maps the sType enumerant back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eRenderingInfo>
  {
    using Type = RenderingInfo;
  };

  // Alias for the extension-era name of this struct (promoted to core).
  using RenderingInfoKHR = RenderingInfo;

  // wrapper struct for struct VkRenderingInputAttachmentIndexInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingInputAttachmentIndexInfo.html
  //
  // NOTE(review): generated wrapper — member order and types must remain layout-compatible with the native
  // C struct, since the conversion operators below reinterpret_cast *this to the Vk type.
  struct RenderingInputAttachmentIndexInfo
  {
    using NativeType = VkRenderingInputAttachmentIndexInfo;

    // Compile-time traits: the sType this struct carries, and whether it may appear more than once in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eRenderingInputAttachmentIndexInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed via its in-class initializer below.
    VULKAN_HPP_CONSTEXPR RenderingInputAttachmentIndexInfo( uint32_t         colorAttachmentCount_         = {},
                                                            const uint32_t * pColorAttachmentInputIndices_ = {},
                                                            const uint32_t * pDepthInputAttachmentIndex_   = {},
                                                            const uint32_t * pStencilInputAttachmentIndex_ = {},
                                                            const void *     pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , colorAttachmentCount{ colorAttachmentCount_ }
      , pColorAttachmentInputIndices{ pColorAttachmentInputIndices_ }
      , pDepthInputAttachmentIndex{ pDepthInputAttachmentIndex_ }
      , pStencilInputAttachmentIndex{ pStencilInputAttachmentIndex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR RenderingInputAttachmentIndexInfo( RenderingInputAttachmentIndexInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct — relies on the wrapper being layout-identical.
    RenderingInputAttachmentIndexInfo( VkRenderingInputAttachmentIndexInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderingInputAttachmentIndexInfo( *reinterpret_cast<RenderingInputAttachmentIndexInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives colorAttachmentCount/pColorAttachmentInputIndices from one array
    // proxy, keeping count and pointer in sync. The proxy does not own its data; the caller must keep the
    // array alive while this struct is in use.
    RenderingInputAttachmentIndexInfo( ArrayProxyNoTemporaries<const uint32_t> const & colorAttachmentInputIndices_,
                                       const uint32_t *                                pDepthInputAttachmentIndex_   = {},
                                       const uint32_t *                                pStencilInputAttachmentIndex_ = {},
                                       const void *                                    pNext_                        = nullptr )
      : pNext( pNext_ )
      , colorAttachmentCount( static_cast<uint32_t>( colorAttachmentInputIndices_.size() ) )
      , pColorAttachmentInputIndices( colorAttachmentInputIndices_.data() )
      , pDepthInputAttachmentIndex( pDepthInputAttachmentIndex_ )
      , pStencilInputAttachmentIndex( pStencilInputAttachmentIndex_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RenderingInputAttachmentIndexInfo & operator=( RenderingInputAttachmentIndexInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    RenderingInputAttachmentIndexInfo & operator=( VkRenderingInputAttachmentIndexInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<RenderingInputAttachmentIndexInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount = colorAttachmentCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfo &
      setPColorAttachmentInputIndices( const uint32_t * pColorAttachmentInputIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pColorAttachmentInputIndices = pColorAttachmentInputIndices_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets count and pointer together from one array proxy (non-owning).
    RenderingInputAttachmentIndexInfo &
      setColorAttachmentInputIndices( ArrayProxyNoTemporaries<const uint32_t> const & colorAttachmentInputIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount         = static_cast<uint32_t>( colorAttachmentInputIndices_.size() );
      pColorAttachmentInputIndices = colorAttachmentInputIndices_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfo &
      setPDepthInputAttachmentIndex( const uint32_t * pDepthInputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      pDepthInputAttachmentIndex = pDepthInputAttachmentIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfo &
      setPStencilInputAttachmentIndex( const uint32_t * pStencilInputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      pStencilInputAttachmentIndex = pStencilInputAttachmentIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C struct (reference and pointer, const and non-const).
    operator VkRenderingInputAttachmentIndexInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderingInputAttachmentIndexInfo *>( this );
    }

    operator VkRenderingInputAttachmentIndexInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderingInputAttachmentIndexInfo *>( this );
    }

    operator VkRenderingInputAttachmentIndexInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkRenderingInputAttachmentIndexInfo *>( this );
    }

    operator VkRenderingInputAttachmentIndexInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkRenderingInputAttachmentIndexInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &, const uint32_t * const &, const uint32_t * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, colorAttachmentCount, pColorAttachmentInputIndices, pDepthInputAttachmentIndex, pStencilInputAttachmentIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderingInputAttachmentIndexInfo const & ) const = default;
#else
    // Member-wise equality; index arrays are compared by pointer, not by pointed-to contents.
    bool operator==( RenderingInputAttachmentIndexInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) &&
             ( pColorAttachmentInputIndices == rhs.pColorAttachmentInputIndices ) && ( pDepthInputAttachmentIndex == rhs.pDepthInputAttachmentIndex ) &&
             ( pStencilInputAttachmentIndex == rhs.pStencilInputAttachmentIndex );
#  endif
    }

    bool operator!=( RenderingInputAttachmentIndexInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native C struct, in the same order.
    StructureType    sType                        = StructureType::eRenderingInputAttachmentIndexInfo;
    const void *     pNext                        = {};
    uint32_t         colorAttachmentCount         = {};
    const uint32_t * pColorAttachmentInputIndices = {};
    const uint32_t * pDepthInputAttachmentIndex   = {};
    const uint32_t * pStencilInputAttachmentIndex = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkRenderingInputAttachmentIndexInfo>
  {
    using Type = RenderingInputAttachmentIndexInfo;
  };
#endif

  // Maps the sType enumerant back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eRenderingInputAttachmentIndexInfo>
  {
    using Type = RenderingInputAttachmentIndexInfo;
  };

  // Alias for the extension-era name of this struct (promoted to core).
  using RenderingInputAttachmentIndexInfoKHR = RenderingInputAttachmentIndexInfo;

  // wrapper struct for struct VkResolveImageInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkResolveImageInfo2.html
  //
  // NOTE(review): generated wrapper — member order and types must remain layout-compatible with VkResolveImageInfo2,
  // since the conversion operators below reinterpret_cast *this to the Vk type.
  struct ResolveImageInfo2
  {
    using NativeType = VkResolveImageInfo2;

    // Compile-time traits: the sType this struct carries, and whether it may appear more than once in a chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eResolveImageInfo2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed via its in-class initializer below.
    VULKAN_HPP_CONSTEXPR ResolveImageInfo2( Image                 srcImage_       = {},
                                            ImageLayout           srcImageLayout_ = ImageLayout::eUndefined,
                                            Image                 dstImage_       = {},
                                            ImageLayout           dstImageLayout_ = ImageLayout::eUndefined,
                                            uint32_t              regionCount_    = {},
                                            const ImageResolve2 * pRegions_       = {},
                                            const void *          pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcImage{ srcImage_ }
      , srcImageLayout{ srcImageLayout_ }
      , dstImage{ dstImage_ }
      , dstImageLayout{ dstImageLayout_ }
      , regionCount{ regionCount_ }
      , pRegions{ pRegions_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ResolveImageInfo2( ResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct — relies on the wrapper being layout-identical.
    ResolveImageInfo2( VkResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : ResolveImageInfo2( *reinterpret_cast<ResolveImageInfo2 const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives regionCount/pRegions from one array proxy, keeping count and pointer
    // in sync. The proxy does not own its data; the caller must keep the array alive while this struct is in use.
    ResolveImageInfo2( Image                                                srcImage_,
                       ImageLayout                                          srcImageLayout_,
                       Image                                                dstImage_,
                       ImageLayout                                          dstImageLayout_,
                       ArrayProxyNoTemporaries<const ImageResolve2> const & regions_,
                       const void *                                         pNext_ = nullptr )
      : pNext( pNext_ )
      , srcImage( srcImage_ )
      , srcImageLayout( srcImageLayout_ )
      , dstImage( dstImage_ )
      , dstImageLayout( dstImageLayout_ )
      , regionCount( static_cast<uint32_t>( regions_.size() ) )
      , pRegions( regions_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    ResolveImageInfo2 & operator=( ResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    ResolveImageInfo2 & operator=( VkResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ResolveImageInfo2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setSrcImage( Image srcImage_ ) VULKAN_HPP_NOEXCEPT
    {
      srcImage = srcImage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setSrcImageLayout( ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      srcImageLayout = srcImageLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setDstImage( Image dstImage_ ) VULKAN_HPP_NOEXCEPT
    {
      dstImage = dstImage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setDstImageLayout( ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      dstImageLayout = dstImageLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = regionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setPRegions( const ImageResolve2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      pRegions = pRegions_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets count and pointer together from one array proxy (non-owning).
    ResolveImageInfo2 & setRegions( ArrayProxyNoTemporaries<const ImageResolve2> const & regions_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = static_cast<uint32_t>( regions_.size() );
      pRegions    = regions_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C struct (reference and pointer, const and non-const).
    operator VkResolveImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkResolveImageInfo2 *>( this );
    }

    operator VkResolveImageInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkResolveImageInfo2 *>( this );
    }

    operator VkResolveImageInfo2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkResolveImageInfo2 *>( this );
    }

    operator VkResolveImageInfo2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkResolveImageInfo2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               Image const &,
               ImageLayout const &,
               Image const &,
               ImageLayout const &,
               uint32_t const &,
               const ImageResolve2 * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ResolveImageInfo2 const & ) const = default;
#else
    // Member-wise equality; pRegions is compared by pointer, not by pointed-to contents.
    bool operator==( ResolveImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) &&
             ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
#  endif
    }

    bool operator!=( ResolveImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the native C struct, in the same order.
    StructureType         sType          = StructureType::eResolveImageInfo2;
    const void *          pNext          = {};
    Image                 srcImage       = {};
    ImageLayout           srcImageLayout = ImageLayout::eUndefined;
    Image                 dstImage       = {};
    ImageLayout           dstImageLayout = ImageLayout::eUndefined;
    uint32_t              regionCount    = {};
    const ImageResolve2 * pRegions       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkResolveImageInfo2>
  {
    using Type = ResolveImageInfo2;
  };
#endif

  // Maps the sType enumerant back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eResolveImageInfo2>
  {
    using Type = ResolveImageInfo2;
  };

  // Alias for the extension-era name of this struct (promoted to core).
  using ResolveImageInfo2KHR = ResolveImageInfo2;

  // wrapper struct for struct VkResolveImageModeInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkResolveImageModeInfoKHR.html
  struct ResolveImageModeInfoKHR
  {
    // The native C struct this wrapper is layout-compatible with.
    using NativeType = VkResolveImageModeInfoKHR;

    // This struct may appear at most once in a pNext chain (consumed by the StructureChain machinery).
    static const bool                                  allowDuplicate = false;
    // The VkStructureType value that the sType member is initialized to.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eResolveImageModeInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed via its member initializer and is never passed in.
    VULKAN_HPP_CONSTEXPR ResolveImageModeInfoKHR( ResolveImageFlagsKHR flags_              = {},
                                                  ResolveModeFlagBits  resolveMode_        = ResolveModeFlagBits::eNone,
                                                  ResolveModeFlagBits  stencilResolveMode_ = ResolveModeFlagBits::eNone,
                                                  const void *         pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , resolveMode{ resolveMode_ }
      , stencilResolveMode{ stencilResolveMode_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ResolveImageModeInfoKHR( ResolveImageModeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper mirrors its layout exactly.
    ResolveImageModeInfoKHR( VkResolveImageModeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ResolveImageModeInfoKHR( *reinterpret_cast<ResolveImageModeInfoKHR const *>( &rhs ) )
    {
    }

    ResolveImageModeInfoKHR & operator=( ResolveImageModeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible reinterpret_cast.
    ResolveImageModeInfoKHR & operator=( VkResolveImageModeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ResolveImageModeInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this to allow fluent-style initialization.
    VULKAN_HPP_CONSTEXPR_14 ResolveImageModeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ResolveImageModeInfoKHR & setFlags( ResolveImageFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ResolveImageModeInfoKHR & setResolveMode( ResolveModeFlagBits resolveMode_ ) VULKAN_HPP_NOEXCEPT
    {
      resolveMode = resolveMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ResolveImageModeInfoKHR & setStencilResolveMode( ResolveModeFlagBits stencilResolveMode_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilResolveMode = stencilResolveMode_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (reference and pointer, const and non-const),
    // so the wrapper can be passed directly to the C API.
    operator VkResolveImageModeInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkResolveImageModeInfoKHR *>( this );
    }

    operator VkResolveImageModeInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkResolveImageModeInfoKHR *>( this );
    }

    operator VkResolveImageModeInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkResolveImageModeInfoKHR *>( this );
    }

    operator VkResolveImageModeInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkResolveImageModeInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order (used e.g. by operator==).
    std::tuple<StructureType const &, const void * const &, ResolveImageFlagsKHR const &, ResolveModeFlagBits const &, ResolveModeFlagBits const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, resolveMode, stencilResolveMode );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ResolveImageModeInfoKHR const & ) const = default;
#else
    // Member-wise equality; compares pNext by pointer value, not by chained contents.
    bool operator==( ResolveImageModeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( resolveMode == rhs.resolveMode ) &&
             ( stencilResolveMode == rhs.stencilResolveMode );
#  endif
    }

    bool operator!=( ResolveImageModeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkResolveImageModeInfoKHR exactly; order and types must not change.
    StructureType        sType              = StructureType::eResolveImageModeInfoKHR;
    const void *         pNext              = {};
    ResolveImageFlagsKHR flags              = {};
    ResolveModeFlagBits  resolveMode        = ResolveModeFlagBits::eNone;
    ResolveModeFlagBits  stencilResolveMode = ResolveModeFlagBits::eNone;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct VkResolveImageModeInfoKHR to its C++ wrapper (C++20 only).
  template <>
  struct CppType<VkResolveImageModeInfoKHR>
  {
    using Type = ResolveImageModeInfoKHR;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct, for generic sType-driven code.
  template <>
  struct CppType<StructureType, StructureType::eResolveImageModeInfoKHR>
  {
    using Type = ResolveImageModeInfoKHR;
  };

  // wrapper struct for struct VkSamplerBlockMatchWindowCreateInfoQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerBlockMatchWindowCreateInfoQCOM.html
  struct SamplerBlockMatchWindowCreateInfoQCOM
  {
    // The native C struct this wrapper is layout-compatible with.
    using NativeType = VkSamplerBlockMatchWindowCreateInfoQCOM;

    // This struct may appear at most once in a pNext chain (consumed by the StructureChain machinery).
    static const bool                                  allowDuplicate = false;
    // The VkStructureType value that the sType member is initialized to.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSamplerBlockMatchWindowCreateInfoQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed via its member initializer and is never passed in.
    VULKAN_HPP_CONSTEXPR SamplerBlockMatchWindowCreateInfoQCOM( Extent2D                        windowExtent_      = {},
                                                                BlockMatchWindowCompareModeQCOM windowCompareMode_ = BlockMatchWindowCompareModeQCOM::eMin,
                                                                const void *                    pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , windowExtent{ windowExtent_ }
      , windowCompareMode{ windowCompareMode_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SamplerBlockMatchWindowCreateInfoQCOM( SamplerBlockMatchWindowCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper mirrors its layout exactly.
    SamplerBlockMatchWindowCreateInfoQCOM( VkSamplerBlockMatchWindowCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : SamplerBlockMatchWindowCreateInfoQCOM( *reinterpret_cast<SamplerBlockMatchWindowCreateInfoQCOM const *>( &rhs ) )
    {
    }

    SamplerBlockMatchWindowCreateInfoQCOM & operator=( SamplerBlockMatchWindowCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible reinterpret_cast.
    SamplerBlockMatchWindowCreateInfoQCOM & operator=( VkSamplerBlockMatchWindowCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SamplerBlockMatchWindowCreateInfoQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this to allow fluent-style initialization.
    VULKAN_HPP_CONSTEXPR_14 SamplerBlockMatchWindowCreateInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerBlockMatchWindowCreateInfoQCOM & setWindowExtent( Extent2D const & windowExtent_ ) VULKAN_HPP_NOEXCEPT
    {
      windowExtent = windowExtent_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerBlockMatchWindowCreateInfoQCOM &
      setWindowCompareMode( BlockMatchWindowCompareModeQCOM windowCompareMode_ ) VULKAN_HPP_NOEXCEPT
    {
      windowCompareMode = windowCompareMode_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, so the wrapper can be passed directly to the C API.
    operator VkSamplerBlockMatchWindowCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSamplerBlockMatchWindowCreateInfoQCOM *>( this );
    }

    operator VkSamplerBlockMatchWindowCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSamplerBlockMatchWindowCreateInfoQCOM *>( this );
    }

    operator VkSamplerBlockMatchWindowCreateInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSamplerBlockMatchWindowCreateInfoQCOM *>( this );
    }

    operator VkSamplerBlockMatchWindowCreateInfoQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSamplerBlockMatchWindowCreateInfoQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order (used e.g. by operator==).
    std::tuple<StructureType const &, const void * const &, Extent2D const &, BlockMatchWindowCompareModeQCOM const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, windowExtent, windowCompareMode );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SamplerBlockMatchWindowCreateInfoQCOM const & ) const = default;
#else
    // Member-wise equality; compares pNext by pointer value, not by chained contents.
    bool operator==( SamplerBlockMatchWindowCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( windowExtent == rhs.windowExtent ) && ( windowCompareMode == rhs.windowCompareMode );
#  endif
    }

    bool operator!=( SamplerBlockMatchWindowCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkSamplerBlockMatchWindowCreateInfoQCOM exactly; order and types must not change.
    StructureType                   sType             = StructureType::eSamplerBlockMatchWindowCreateInfoQCOM;
    const void *                    pNext             = {};
    Extent2D                        windowExtent      = {};
    BlockMatchWindowCompareModeQCOM windowCompareMode = BlockMatchWindowCompareModeQCOM::eMin;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct VkSamplerBlockMatchWindowCreateInfoQCOM to its C++ wrapper (C++20 only).
  template <>
  struct CppType<VkSamplerBlockMatchWindowCreateInfoQCOM>
  {
    using Type = SamplerBlockMatchWindowCreateInfoQCOM;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct, for generic sType-driven code.
  template <>
  struct CppType<StructureType, StructureType::eSamplerBlockMatchWindowCreateInfoQCOM>
  {
    using Type = SamplerBlockMatchWindowCreateInfoQCOM;
  };

  // wrapper struct for struct VkSamplerBorderColorComponentMappingCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerBorderColorComponentMappingCreateInfoEXT.html
  struct SamplerBorderColorComponentMappingCreateInfoEXT
  {
    // The native C struct this wrapper is layout-compatible with.
    using NativeType = VkSamplerBorderColorComponentMappingCreateInfoEXT;

    // This struct may appear at most once in a pNext chain (consumed by the StructureChain machinery).
    static const bool                                  allowDuplicate = false;
    // The VkStructureType value that the sType member is initialized to.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed via its member initializer and is never passed in.
    VULKAN_HPP_CONSTEXPR
      SamplerBorderColorComponentMappingCreateInfoEXT( ComponentMapping components_ = {}, Bool32 srgb_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , components{ components_ }
      , srgb{ srgb_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      SamplerBorderColorComponentMappingCreateInfoEXT( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper mirrors its layout exactly.
    SamplerBorderColorComponentMappingCreateInfoEXT( VkSamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SamplerBorderColorComponentMappingCreateInfoEXT( *reinterpret_cast<SamplerBorderColorComponentMappingCreateInfoEXT const *>( &rhs ) )
    {
    }

    SamplerBorderColorComponentMappingCreateInfoEXT & operator=( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible reinterpret_cast.
    SamplerBorderColorComponentMappingCreateInfoEXT & operator=( VkSamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SamplerBorderColorComponentMappingCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this to allow fluent-style initialization.
    VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & setComponents( ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
    {
      components = components_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & setSrgb( Bool32 srgb_ ) VULKAN_HPP_NOEXCEPT
    {
      srgb = srgb_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, so the wrapper can be passed directly to the C API.
    operator VkSamplerBorderColorComponentMappingCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSamplerBorderColorComponentMappingCreateInfoEXT *>( this );
    }

    operator VkSamplerBorderColorComponentMappingCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSamplerBorderColorComponentMappingCreateInfoEXT *>( this );
    }

    operator VkSamplerBorderColorComponentMappingCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSamplerBorderColorComponentMappingCreateInfoEXT *>( this );
    }

    operator VkSamplerBorderColorComponentMappingCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSamplerBorderColorComponentMappingCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order (used e.g. by operator==).
    std::tuple<StructureType const &, const void * const &, ComponentMapping const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, components, srgb );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SamplerBorderColorComponentMappingCreateInfoEXT const & ) const = default;
#else
    // Member-wise equality; compares pNext by pointer value, not by chained contents.
    bool operator==( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( components == rhs.components ) && ( srgb == rhs.srgb );
#  endif
    }

    bool operator!=( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkSamplerBorderColorComponentMappingCreateInfoEXT exactly; order and types must not change.
    StructureType    sType      = StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT;
    const void *     pNext      = {};
    ComponentMapping components = {};
    Bool32           srgb       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct VkSamplerBorderColorComponentMappingCreateInfoEXT to its C++ wrapper (C++20 only).
  template <>
  struct CppType<VkSamplerBorderColorComponentMappingCreateInfoEXT>
  {
    using Type = SamplerBorderColorComponentMappingCreateInfoEXT;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct, for generic sType-driven code.
  template <>
  struct CppType<StructureType, StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT>
  {
    using Type = SamplerBorderColorComponentMappingCreateInfoEXT;
  };

  // wrapper struct for struct VkSamplerCaptureDescriptorDataInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerCaptureDescriptorDataInfoEXT.html
  struct SamplerCaptureDescriptorDataInfoEXT
  {
    // The native C struct this wrapper is layout-compatible with.
    using NativeType = VkSamplerCaptureDescriptorDataInfoEXT;

    // This struct may appear at most once in a pNext chain (consumed by the StructureChain machinery).
    static const bool                                  allowDuplicate = false;
    // The VkStructureType value that the sType member is initialized to.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSamplerCaptureDescriptorDataInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed via its member initializer and is never passed in.
    VULKAN_HPP_CONSTEXPR SamplerCaptureDescriptorDataInfoEXT( Sampler sampler_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , sampler{ sampler_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SamplerCaptureDescriptorDataInfoEXT( SamplerCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper mirrors its layout exactly.
    SamplerCaptureDescriptorDataInfoEXT( VkSamplerCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SamplerCaptureDescriptorDataInfoEXT( *reinterpret_cast<SamplerCaptureDescriptorDataInfoEXT const *>( &rhs ) )
    {
    }

    SamplerCaptureDescriptorDataInfoEXT & operator=( SamplerCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible reinterpret_cast.
    SamplerCaptureDescriptorDataInfoEXT & operator=( VkSamplerCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SamplerCaptureDescriptorDataInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this to allow fluent-style initialization.
    VULKAN_HPP_CONSTEXPR_14 SamplerCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerCaptureDescriptorDataInfoEXT & setSampler( Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
    {
      sampler = sampler_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, so the wrapper can be passed directly to the C API.
    operator VkSamplerCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( this );
    }

    operator VkSamplerCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSamplerCaptureDescriptorDataInfoEXT *>( this );
    }

    operator VkSamplerCaptureDescriptorDataInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( this );
    }

    operator VkSamplerCaptureDescriptorDataInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSamplerCaptureDescriptorDataInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order (used e.g. by operator==).
    std::tuple<StructureType const &, const void * const &, Sampler const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, sampler );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SamplerCaptureDescriptorDataInfoEXT const & ) const = default;
#else
    // Member-wise equality; compares pNext by pointer value, not by chained contents.
    bool operator==( SamplerCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampler == rhs.sampler );
#  endif
    }

    bool operator!=( SamplerCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkSamplerCaptureDescriptorDataInfoEXT exactly; order and types must not change.
    StructureType sType   = StructureType::eSamplerCaptureDescriptorDataInfoEXT;
    const void *  pNext   = {};
    Sampler       sampler = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct VkSamplerCaptureDescriptorDataInfoEXT to its C++ wrapper (C++20 only).
  template <>
  struct CppType<VkSamplerCaptureDescriptorDataInfoEXT>
  {
    using Type = SamplerCaptureDescriptorDataInfoEXT;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct, for generic sType-driven code.
  template <>
  struct CppType<StructureType, StructureType::eSamplerCaptureDescriptorDataInfoEXT>
  {
    using Type = SamplerCaptureDescriptorDataInfoEXT;
  };

  // wrapper struct for struct VkSamplerCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerCreateInfo.html
  struct SamplerCreateInfo
  {
    // The native C struct this wrapper is layout-compatible with.
    using NativeType = VkSamplerCreateInfo;

    // This struct may appear at most once in a pNext chain (consumed by the StructureChain machinery).
    static const bool                                  allowDuplicate = false;
    // The VkStructureType value that the sType member is initialized to.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSamplerCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed via its member initializer and is never passed in.
    // Defaults match the zero/first-enumerant values of the corresponding C struct fields.
    VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateFlags flags_                   = {},
                                            Filter             magFilter_               = Filter::eNearest,
                                            Filter             minFilter_               = Filter::eNearest,
                                            SamplerMipmapMode  mipmapMode_              = SamplerMipmapMode::eNearest,
                                            SamplerAddressMode addressModeU_            = SamplerAddressMode::eRepeat,
                                            SamplerAddressMode addressModeV_            = SamplerAddressMode::eRepeat,
                                            SamplerAddressMode addressModeW_            = SamplerAddressMode::eRepeat,
                                            float              mipLodBias_              = {},
                                            Bool32             anisotropyEnable_        = {},
                                            float              maxAnisotropy_           = {},
                                            Bool32             compareEnable_           = {},
                                            CompareOp          compareOp_               = CompareOp::eNever,
                                            float              minLod_                  = {},
                                            float              maxLod_                  = {},
                                            BorderColor        borderColor_             = BorderColor::eFloatTransparentBlack,
                                            Bool32             unnormalizedCoordinates_ = {},
                                            const void *       pNext_                   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , magFilter{ magFilter_ }
      , minFilter{ minFilter_ }
      , mipmapMode{ mipmapMode_ }
      , addressModeU{ addressModeU_ }
      , addressModeV{ addressModeV_ }
      , addressModeW{ addressModeW_ }
      , mipLodBias{ mipLodBias_ }
      , anisotropyEnable{ anisotropyEnable_ }
      , maxAnisotropy{ maxAnisotropy_ }
      , compareEnable{ compareEnable_ }
      , compareOp{ compareOp_ }
      , minLod{ minLod_ }
      , maxLod{ maxLod_ }
      , borderColor{ borderColor_ }
      , unnormalizedCoordinates{ unnormalizedCoordinates_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper mirrors its layout exactly.
    SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SamplerCreateInfo( *reinterpret_cast<SamplerCreateInfo const *>( &rhs ) ) {}

    SamplerCreateInfo & operator=( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible reinterpret_cast.
    SamplerCreateInfo & operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SamplerCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this to allow fluent-style initialization,
    // e.g. SamplerCreateInfo{}.setMagFilter( ... ).setMinFilter( ... ).
    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setFlags( SamplerCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMagFilter( Filter magFilter_ ) VULKAN_HPP_NOEXCEPT
    {
      magFilter = magFilter_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinFilter( Filter minFilter_ ) VULKAN_HPP_NOEXCEPT
    {
      minFilter = minFilter_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipmapMode( SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT
    {
      mipmapMode = mipmapMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeU( SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT
    {
      addressModeU = addressModeU_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeV( SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT
    {
      addressModeV = addressModeV_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeW( SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT
    {
      addressModeW = addressModeW_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) VULKAN_HPP_NOEXCEPT
    {
      mipLodBias = mipLodBias_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAnisotropyEnable( Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      anisotropyEnable = anisotropyEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) VULKAN_HPP_NOEXCEPT
    {
      maxAnisotropy = maxAnisotropy_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareEnable( Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      compareEnable = compareEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareOp( CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
    {
      compareOp = compareOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT
    {
      minLod = minLod_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxLod( float maxLod_ ) VULKAN_HPP_NOEXCEPT
    {
      maxLod = maxLod_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setBorderColor( BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT
    {
      borderColor = borderColor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setUnnormalizedCoordinates( Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT
    {
      unnormalizedCoordinates = unnormalizedCoordinates_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, so the wrapper can be passed directly to the C API.
    operator VkSamplerCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSamplerCreateInfo *>( this );
    }

    operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSamplerCreateInfo *>( this );
    }

    operator VkSamplerCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSamplerCreateInfo *>( this );
    }

    operator VkSamplerCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSamplerCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all 18 members, in declaration order (used e.g. by operator==).
    std::tuple<StructureType const &,
               const void * const &,
               SamplerCreateFlags const &,
               Filter const &,
               Filter const &,
               SamplerMipmapMode const &,
               SamplerAddressMode const &,
               SamplerAddressMode const &,
               SamplerAddressMode const &,
               float const &,
               Bool32 const &,
               float const &,
               Bool32 const &,
               CompareOp const &,
               float const &,
               float const &,
               BorderColor const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       flags,
                       magFilter,
                       minFilter,
                       mipmapMode,
                       addressModeU,
                       addressModeV,
                       addressModeW,
                       mipLodBias,
                       anisotropyEnable,
                       maxAnisotropy,
                       compareEnable,
                       compareOp,
                       minLod,
                       maxLod,
                       borderColor,
                       unnormalizedCoordinates );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SamplerCreateInfo const & ) const = default;
#else
    // Member-wise equality; compares pNext by pointer value, and float members with exact ==.
    bool operator==( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( magFilter == rhs.magFilter ) && ( minFilter == rhs.minFilter ) &&
             ( mipmapMode == rhs.mipmapMode ) && ( addressModeU == rhs.addressModeU ) && ( addressModeV == rhs.addressModeV ) &&
             ( addressModeW == rhs.addressModeW ) && ( mipLodBias == rhs.mipLodBias ) && ( anisotropyEnable == rhs.anisotropyEnable ) &&
             ( maxAnisotropy == rhs.maxAnisotropy ) && ( compareEnable == rhs.compareEnable ) && ( compareOp == rhs.compareOp ) && ( minLod == rhs.minLod ) &&
             ( maxLod == rhs.maxLod ) && ( borderColor == rhs.borderColor ) && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates );
#  endif
    }

    bool operator!=( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkSamplerCreateInfo exactly; order and types must not change.
    StructureType      sType                   = StructureType::eSamplerCreateInfo;
    const void *       pNext                   = {};
    SamplerCreateFlags flags                   = {};
    Filter             magFilter               = Filter::eNearest;
    Filter             minFilter               = Filter::eNearest;
    SamplerMipmapMode  mipmapMode              = SamplerMipmapMode::eNearest;
    SamplerAddressMode addressModeU            = SamplerAddressMode::eRepeat;
    SamplerAddressMode addressModeV            = SamplerAddressMode::eRepeat;
    SamplerAddressMode addressModeW            = SamplerAddressMode::eRepeat;
    float              mipLodBias              = {};
    Bool32             anisotropyEnable        = {};
    float              maxAnisotropy           = {};
    Bool32             compareEnable           = {};
    CompareOp          compareOp               = CompareOp::eNever;
    float              minLod                  = {};
    float              maxLod                  = {};
    BorderColor        borderColor             = BorderColor::eFloatTransparentBlack;
    Bool32             unnormalizedCoordinates = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct VkSamplerCreateInfo to its C++ wrapper (C++20 only).
  template <>
  struct CppType<VkSamplerCreateInfo>
  {
    using Type = SamplerCreateInfo;
  };
#endif

  // Maps the StructureType enumerant back to the wrapper struct, for generic sType-driven code.
  template <>
  struct CppType<StructureType, StructureType::eSamplerCreateInfo>
  {
    using Type = SamplerCreateInfo;
  };

  // wrapper struct for struct VkSamplerCubicWeightsCreateInfoQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerCubicWeightsCreateInfoQCOM.html
  struct SamplerCubicWeightsCreateInfoQCOM
  {
    // The native C struct this wrapper is layout-compatible with.
    using NativeType = VkSamplerCubicWeightsCreateInfoQCOM;

    // This struct may appear at most once in a pNext chain (consumed by the StructureChain machinery).
    static const bool                                  allowDuplicate = false;
    // The VkStructureType value that the sType member is initialized to.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSamplerCubicWeightsCreateInfoQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed via its member initializer and is never passed in.
    VULKAN_HPP_CONSTEXPR SamplerCubicWeightsCreateInfoQCOM( CubicFilterWeightsQCOM cubicWeights_ = CubicFilterWeightsQCOM::eCatmullRom,
                                                            const void *           pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , cubicWeights{ cubicWeights_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SamplerCubicWeightsCreateInfoQCOM( SamplerCubicWeightsCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper mirrors its layout exactly.
    SamplerCubicWeightsCreateInfoQCOM( VkSamplerCubicWeightsCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : SamplerCubicWeightsCreateInfoQCOM( *reinterpret_cast<SamplerCubicWeightsCreateInfoQCOM const *>( &rhs ) )
    {
    }

    SamplerCubicWeightsCreateInfoQCOM & operator=( SamplerCubicWeightsCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible reinterpret_cast.
    SamplerCubicWeightsCreateInfoQCOM & operator=( VkSamplerCubicWeightsCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SamplerCubicWeightsCreateInfoQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this to allow fluent-style initialization.
    VULKAN_HPP_CONSTEXPR_14 SamplerCubicWeightsCreateInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerCubicWeightsCreateInfoQCOM & setCubicWeights( CubicFilterWeightsQCOM cubicWeights_ ) VULKAN_HPP_NOEXCEPT
    {
      cubicWeights = cubicWeights_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, so the wrapper can be passed directly to the C API.
    operator VkSamplerCubicWeightsCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSamplerCubicWeightsCreateInfoQCOM *>( this );
    }

    operator VkSamplerCubicWeightsCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSamplerCubicWeightsCreateInfoQCOM *>( this );
    }

    operator VkSamplerCubicWeightsCreateInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSamplerCubicWeightsCreateInfoQCOM *>( this );
    }

    operator VkSamplerCubicWeightsCreateInfoQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSamplerCubicWeightsCreateInfoQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order (used e.g. by operator==).
    std::tuple<StructureType const &, const void * const &, CubicFilterWeightsQCOM const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, cubicWeights );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SamplerCubicWeightsCreateInfoQCOM const & ) const = default;
#else
    // Member-wise equality; compares pNext by pointer value, not by chained contents.
    bool operator==( SamplerCubicWeightsCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cubicWeights == rhs.cubicWeights );
#  endif
    }

    bool operator!=( SamplerCubicWeightsCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkSamplerCubicWeightsCreateInfoQCOM exactly; order and types must not change.
    StructureType          sType        = StructureType::eSamplerCubicWeightsCreateInfoQCOM;
    const void *           pNext        = {};
    CubicFilterWeightsQCOM cubicWeights = CubicFilterWeightsQCOM::eCatmullRom;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkSamplerCubicWeightsCreateInfoQCOM>
  {
    using Type = SamplerCubicWeightsCreateInfoQCOM;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eSamplerCubicWeightsCreateInfoQCOM>
  {
    using Type = SamplerCubicWeightsCreateInfoQCOM;
  };

  // wrapper struct for struct VkSamplerCustomBorderColorCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerCustomBorderColorCreateInfoEXT.html
  struct SamplerCustomBorderColorCreateInfoEXT
  {
    using NativeType = VkSamplerCustomBorderColorCreateInfoEXT;

    // This structure type may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The StructureType value that the sType member is fixed to for this wrapper.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSamplerCustomBorderColorCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is set by its member initializer, so only the payload and pNext are parameters.
    // NOTE: CONSTEXPR_14 (not CONSTEXPR) here — presumably because copying the ClearColorValue union member
    // is not a C++11 constant expression; confirm against the generator if this matters.
    VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT( ClearColorValue customBorderColor_ = {},
                                                                   Format          format_            = Format::eUndefined,
                                                                   const void *    pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , customBorderColor{ customBorderColor_ }
      , format{ format_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-identical to VkSamplerCustomBorderColorCreateInfoEXT.
    SamplerCustomBorderColorCreateInfoEXT( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SamplerCustomBorderColorCreateInfoEXT( *reinterpret_cast<SamplerCustomBorderColorCreateInfoEXT const *>( &rhs ) )
    {
    }

    SamplerCustomBorderColorCreateInfoEXT & operator=( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (available even when the wrapper constructors are compiled out).
    SamplerCustomBorderColorCreateInfoEXT & operator=( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SamplerCustomBorderColorCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & setCustomBorderColor( ClearColorValue const & customBorderColor_ ) VULKAN_HPP_NOEXCEPT
    {
      customBorderColor = customBorderColor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & setFormat( Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (by reference and by pointer) for passing to the C API.
    operator VkSamplerCustomBorderColorCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSamplerCustomBorderColorCreateInfoEXT *>( this );
    }

    operator VkSamplerCustomBorderColorCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSamplerCustomBorderColorCreateInfoEXT *>( this );
    }

    operator VkSamplerCustomBorderColorCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSamplerCustomBorderColorCreateInfoEXT *>( this );
    }

    operator VkSamplerCustomBorderColorCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSamplerCustomBorderColorCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, used for generic comparison and hashing.
    std::tuple<StructureType const &, const void * const &, ClearColorValue const &, Format const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, customBorderColor, format );
    }
#endif

    // NOTE(review): unlike the sibling wrappers, no operator==/!=/<=> is generated here — presumably
    // because customBorderColor is a union (ClearColorValue) without meaningful equality; confirm
    // against the Vulkan-Hpp generator before adding comparisons.

  public:
    // sType must keep this value so the structure is identifiable in a pNext chain.
    StructureType   sType             = StructureType::eSamplerCustomBorderColorCreateInfoEXT;
    const void *    pNext             = {};
    ClearColorValue customBorderColor = {};
    Format          format            = Format::eUndefined;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type back to this C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkSamplerCustomBorderColorCreateInfoEXT>
  {
    using Type = SamplerCustomBorderColorCreateInfoEXT;
  };
#endif

  // Maps the StructureType enum value back to this C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::eSamplerCustomBorderColorCreateInfoEXT>
  {
    using Type = SamplerCustomBorderColorCreateInfoEXT;
  };

  // wrapper struct for struct VkSamplerReductionModeCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerReductionModeCreateInfo.html
  struct SamplerReductionModeCreateInfo
  {
    using NativeType = VkSamplerReductionModeCreateInfo;

    // This structure type may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The StructureType value that the sType member is fixed to for this wrapper.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSamplerReductionModeCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is set by its member initializer, so only the payload and pNext are parameters.
    VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo( SamplerReductionMode reductionMode_ = SamplerReductionMode::eWeightedAverage,
                                                         const void *         pNext_         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , reductionMode{ reductionMode_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-identical to VkSamplerReductionModeCreateInfo.
    SamplerReductionModeCreateInfo( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SamplerReductionModeCreateInfo( *reinterpret_cast<SamplerReductionModeCreateInfo const *>( &rhs ) )
    {
    }

    SamplerReductionModeCreateInfo & operator=( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (available even when the wrapper constructors are compiled out).
    SamplerReductionModeCreateInfo & operator=( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SamplerReductionModeCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo & setReductionMode( SamplerReductionMode reductionMode_ ) VULKAN_HPP_NOEXCEPT
    {
      reductionMode = reductionMode_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (by reference and by pointer) for passing to the C API.
    operator VkSamplerReductionModeCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSamplerReductionModeCreateInfo *>( this );
    }

    operator VkSamplerReductionModeCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSamplerReductionModeCreateInfo *>( this );
    }

    operator VkSamplerReductionModeCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSamplerReductionModeCreateInfo *>( this );
    }

    operator VkSamplerReductionModeCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSamplerReductionModeCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, used for generic comparison and hashing.
    std::tuple<StructureType const &, const void * const &, SamplerReductionMode const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, reductionMode );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SamplerReductionModeCreateInfo const & ) const = default;
#else
    // Member-wise equality; delegates to the reflect() tuple when reflection is enabled.
    bool operator==( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( reductionMode == rhs.reductionMode );
#  endif
    }

    bool operator!=( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType must keep this value so the structure is identifiable in a pNext chain.
    StructureType        sType         = StructureType::eSamplerReductionModeCreateInfo;
    const void *         pNext         = {};
    SamplerReductionMode reductionMode = SamplerReductionMode::eWeightedAverage;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type back to this C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkSamplerReductionModeCreateInfo>
  {
    using Type = SamplerReductionModeCreateInfo;
  };
#endif

  // Maps the StructureType enum value back to this C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::eSamplerReductionModeCreateInfo>
  {
    using Type = SamplerReductionModeCreateInfo;
  };

  // Alias from before the type was promoted from the EXT extension to core.
  using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo;

  // wrapper struct for struct VkSamplerYcbcrConversionCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerYcbcrConversionCreateInfo.html
  struct SamplerYcbcrConversionCreateInfo
  {
    using NativeType = VkSamplerYcbcrConversionCreateInfo;

    // This structure type may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The StructureType value that the sType member is fixed to for this wrapper.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSamplerYcbcrConversionCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is set by its member initializer; parameter order matches the C struct's
    // member order, with pNext moved last so it can be defaulted.
    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( Format                      format_                      = Format::eUndefined,
                                                           SamplerYcbcrModelConversion ycbcrModel_                  = SamplerYcbcrModelConversion::eRgbIdentity,
                                                           SamplerYcbcrRange           ycbcrRange_                  = SamplerYcbcrRange::eItuFull,
                                                           ComponentMapping            components_                  = {},
                                                           ChromaLocation              xChromaOffset_               = ChromaLocation::eCositedEven,
                                                           ChromaLocation              yChromaOffset_               = ChromaLocation::eCositedEven,
                                                           Filter                      chromaFilter_                = Filter::eNearest,
                                                           Bool32                      forceExplicitReconstruction_ = {},
                                                           const void *                pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , format{ format_ }
      , ycbcrModel{ ycbcrModel_ }
      , ycbcrRange{ ycbcrRange_ }
      , components{ components_ }
      , xChromaOffset{ xChromaOffset_ }
      , yChromaOffset{ yChromaOffset_ }
      , chromaFilter{ chromaFilter_ }
      , forceExplicitReconstruction{ forceExplicitReconstruction_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-identical to VkSamplerYcbcrConversionCreateInfo.
    SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SamplerYcbcrConversionCreateInfo( *reinterpret_cast<SamplerYcbcrConversionCreateInfo const *>( &rhs ) )
    {
    }

    SamplerYcbcrConversionCreateInfo & operator=( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (available even when the wrapper constructors are compiled out).
    SamplerYcbcrConversionCreateInfo & operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SamplerYcbcrConversionCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setFormat( Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setYcbcrModel( SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT
    {
      ycbcrModel = ycbcrModel_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setYcbcrRange( SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT
    {
      ycbcrRange = ycbcrRange_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setComponents( ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
    {
      components = components_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setXChromaOffset( ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      xChromaOffset = xChromaOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setYChromaOffset( ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      yChromaOffset = yChromaOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setChromaFilter( Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT
    {
      chromaFilter = chromaFilter_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setForceExplicitReconstruction( Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT
    {
      forceExplicitReconstruction = forceExplicitReconstruction_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (by reference and by pointer) for passing to the C API.
    operator VkSamplerYcbcrConversionCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( this );
    }

    operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSamplerYcbcrConversionCreateInfo *>( this );
    }

    operator VkSamplerYcbcrConversionCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( this );
    }

    operator VkSamplerYcbcrConversionCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSamplerYcbcrConversionCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, used for generic comparison and hashing.
    std::tuple<StructureType const &,
               const void * const &,
               Format const &,
               SamplerYcbcrModelConversion const &,
               SamplerYcbcrRange const &,
               ComponentMapping const &,
               ChromaLocation const &,
               ChromaLocation const &,
               Filter const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, format, ycbcrModel, ycbcrRange, components, xChromaOffset, yChromaOffset, chromaFilter, forceExplicitReconstruction );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SamplerYcbcrConversionCreateInfo const & ) const = default;
#else
    // Member-wise equality; delegates to the reflect() tuple when reflection is enabled.
    bool operator==( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( ycbcrModel == rhs.ycbcrModel ) &&
             ( ycbcrRange == rhs.ycbcrRange ) && ( components == rhs.components ) && ( xChromaOffset == rhs.xChromaOffset ) &&
             ( yChromaOffset == rhs.yChromaOffset ) && ( chromaFilter == rhs.chromaFilter ) &&
             ( forceExplicitReconstruction == rhs.forceExplicitReconstruction );
#  endif
    }

    bool operator!=( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType must keep this value so the structure is identifiable in a pNext chain.
    StructureType               sType                       = StructureType::eSamplerYcbcrConversionCreateInfo;
    const void *                pNext                       = {};
    Format                      format                      = Format::eUndefined;
    SamplerYcbcrModelConversion ycbcrModel                  = SamplerYcbcrModelConversion::eRgbIdentity;
    SamplerYcbcrRange           ycbcrRange                  = SamplerYcbcrRange::eItuFull;
    ComponentMapping            components                  = {};
    ChromaLocation              xChromaOffset               = ChromaLocation::eCositedEven;
    ChromaLocation              yChromaOffset               = ChromaLocation::eCositedEven;
    Filter                      chromaFilter                = Filter::eNearest;
    Bool32                      forceExplicitReconstruction = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type back to this C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkSamplerYcbcrConversionCreateInfo>
  {
    using Type = SamplerYcbcrConversionCreateInfo;
  };
#endif

  // Maps the StructureType enum value back to this C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::eSamplerYcbcrConversionCreateInfo>
  {
    using Type = SamplerYcbcrConversionCreateInfo;
  };

  // Alias from before the type was promoted from the KHR extension to core.
  using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo;

  // wrapper struct for struct VkSamplerYcbcrConversionImageFormatProperties, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerYcbcrConversionImageFormatProperties.html
  struct SamplerYcbcrConversionImageFormatProperties
  {
    using NativeType = VkSamplerYcbcrConversionImageFormatProperties;

    // This structure type may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The StructureType value that the sType member is fixed to for this wrapper.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSamplerYcbcrConversionImageFormatProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is set by its member initializer. Note pNext is non-const and no setters
    // are generated below — typical of a properties struct that is filled out by the implementation.
    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( uint32_t combinedImageSamplerDescriptorCount_ = {},
                                                                      void *   pNext_                               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , combinedImageSamplerDescriptorCount{ combinedImageSamplerDescriptorCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-identical to VkSamplerYcbcrConversionImageFormatProperties.
    SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : SamplerYcbcrConversionImageFormatProperties( *reinterpret_cast<SamplerYcbcrConversionImageFormatProperties const *>( &rhs ) )
    {
    }

    SamplerYcbcrConversionImageFormatProperties & operator=( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (available even when the wrapper constructors are compiled out).
    SamplerYcbcrConversionImageFormatProperties & operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SamplerYcbcrConversionImageFormatProperties const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C struct (by reference and by pointer) for passing to the C API.
    operator VkSamplerYcbcrConversionImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSamplerYcbcrConversionImageFormatProperties *>( this );
    }

    operator VkSamplerYcbcrConversionImageFormatProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties *>( this );
    }

    operator VkSamplerYcbcrConversionImageFormatProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSamplerYcbcrConversionImageFormatProperties *>( this );
    }

    operator VkSamplerYcbcrConversionImageFormatProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, used for generic comparison and hashing.
    std::tuple<StructureType const &, void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, combinedImageSamplerDescriptorCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SamplerYcbcrConversionImageFormatProperties const & ) const = default;
#else
    // Member-wise equality; delegates to the reflect() tuple when reflection is enabled.
    bool operator==( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount );
#  endif
    }

    bool operator!=( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType must keep this value so the structure is identifiable in a pNext chain.
    StructureType sType                               = StructureType::eSamplerYcbcrConversionImageFormatProperties;
    void *        pNext                               = {};
    uint32_t      combinedImageSamplerDescriptorCount = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type back to this C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkSamplerYcbcrConversionImageFormatProperties>
  {
    using Type = SamplerYcbcrConversionImageFormatProperties;
  };
#endif

  // Maps the StructureType enum value back to this C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::eSamplerYcbcrConversionImageFormatProperties>
  {
    using Type = SamplerYcbcrConversionImageFormatProperties;
  };

  // Alias from before the type was promoted from the KHR extension to core.
  using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties;

  // wrapper struct for struct VkSamplerYcbcrConversionInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerYcbcrConversionInfo.html
  struct SamplerYcbcrConversionInfo
  {
    using NativeType = VkSamplerYcbcrConversionInfo;

    // This structure type may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The StructureType value that the sType member is fixed to for this wrapper.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSamplerYcbcrConversionInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is set by its member initializer, so only the payload and pNext are parameters.
    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( SamplerYcbcrConversion conversion_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , conversion{ conversion_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-identical to VkSamplerYcbcrConversionInfo.
    SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SamplerYcbcrConversionInfo( *reinterpret_cast<SamplerYcbcrConversionInfo const *>( &rhs ) )
    {
    }

    SamplerYcbcrConversionInfo & operator=( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (available even when the wrapper constructors are compiled out).
    SamplerYcbcrConversionInfo & operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SamplerYcbcrConversionInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo & setConversion( SamplerYcbcrConversion conversion_ ) VULKAN_HPP_NOEXCEPT
    {
      conversion = conversion_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (by reference and by pointer) for passing to the C API.
    operator VkSamplerYcbcrConversionInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSamplerYcbcrConversionInfo *>( this );
    }

    operator VkSamplerYcbcrConversionInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSamplerYcbcrConversionInfo *>( this );
    }

    operator VkSamplerYcbcrConversionInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSamplerYcbcrConversionInfo *>( this );
    }

    operator VkSamplerYcbcrConversionInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSamplerYcbcrConversionInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, used for generic comparison and hashing.
    std::tuple<StructureType const &, const void * const &, SamplerYcbcrConversion const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, conversion );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SamplerYcbcrConversionInfo const & ) const = default;
#else
    // Member-wise equality; delegates to the reflect() tuple when reflection is enabled.
    bool operator==( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conversion == rhs.conversion );
#  endif
    }

    bool operator!=( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType must keep this value so the structure is identifiable in a pNext chain.
    StructureType          sType      = StructureType::eSamplerYcbcrConversionInfo;
    const void *           pNext      = {};
    SamplerYcbcrConversion conversion = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C struct type back to this C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkSamplerYcbcrConversionInfo>
  {
    using Type = SamplerYcbcrConversionInfo;
  };
#endif

  // Maps the StructureType enum value back to this C++ wrapper.
  template <>
  struct CppType<StructureType, StructureType::eSamplerYcbcrConversionInfo>
  {
    using Type = SamplerYcbcrConversionInfo;
  };

  // Alias from before the type was promoted from the KHR extension to core.
  using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo;

  // wrapper struct for struct VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM.html
  struct SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM
  {
    using NativeType = VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM( Bool32 enableYDegamma_    = {},
                                                                           Bool32 enableCbCrDegamma_ = {},
                                                                           void * pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , enableYDegamma{ enableYDegamma_ }
      , enableCbCrDegamma{ enableCbCrDegamma_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM( SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM( VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM( *reinterpret_cast<SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const *>( &rhs ) )
    {
    }

    SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM & operator=( SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM & operator=( VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM & setEnableYDegamma( Bool32 enableYDegamma_ ) VULKAN_HPP_NOEXCEPT
    {
      enableYDegamma = enableYDegamma_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM & setEnableCbCrDegamma( Bool32 enableCbCrDegamma_ ) VULKAN_HPP_NOEXCEPT
    {
      enableCbCrDegamma = enableCbCrDegamma_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM *>( this );
    }

    operator VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM *>( this );
    }

    operator VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM *>( this );
    }

    operator VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, enableYDegamma, enableCbCrDegamma );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & ) const = default;
#else
    bool operator==( SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( enableYDegamma == rhs.enableYDegamma ) && ( enableCbCrDegamma == rhs.enableCbCrDegamma );
#  endif
    }

    bool operator!=( SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType             = StructureType::eSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM;
    void *        pNext             = {};
    Bool32        enableYDegamma    = {};
    Bool32        enableCbCrDegamma = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper (used by generic code; C++20 and up only).
  template <>
  struct CppType<VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM>
  {
    using Type = SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type, so sType values can be resolved to C++ types at compile time.
  template <>
  struct CppType<StructureType, StructureType::eSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM>
  {
    using Type = SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM;
  };

#if defined( VK_USE_PLATFORM_SCREEN_QNX )
  // wrapper struct for struct VkScreenBufferFormatPropertiesQNX, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkScreenBufferFormatPropertiesQNX.html
  //
  // Note: no member setters are generated for this struct, and pNext is a non-const void * —
  // consistent with a structure that is filled in by the implementation and returned to the
  // application (TODO confirm against the registry's returnedonly attribute).
  struct ScreenBufferFormatPropertiesQNX
  {
    using NativeType = VkScreenBufferFormatPropertiesQNX;

    // structureType: the VkStructureType value this wrapper's sType must carry.
    // allowDuplicate: presumably consumed by pNext-chain validation elsewhere in vulkan.hpp
    // (consumer not visible in this chunk).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eScreenBufferFormatPropertiesQNX;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: every member is defaultable; pNext comes last so data members can be
    // supplied positionally without naming the chain pointer.
    VULKAN_HPP_CONSTEXPR ScreenBufferFormatPropertiesQNX( Format                      format_                           = Format::eUndefined,
                                                          uint64_t                    externalFormat_                   = {},
                                                          uint64_t                    screenUsage_                      = {},
                                                          FormatFeatureFlags          formatFeatures_                   = {},
                                                          ComponentMapping            samplerYcbcrConversionComponents_ = {},
                                                          SamplerYcbcrModelConversion suggestedYcbcrModel_    = SamplerYcbcrModelConversion::eRgbIdentity,
                                                          SamplerYcbcrRange           suggestedYcbcrRange_    = SamplerYcbcrRange::eItuFull,
                                                          ChromaLocation              suggestedXChromaOffset_ = ChromaLocation::eCositedEven,
                                                          ChromaLocation              suggestedYChromaOffset_ = ChromaLocation::eCositedEven,
                                                          void *                      pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , format{ format_ }
      , externalFormat{ externalFormat_ }
      , screenUsage{ screenUsage_ }
      , formatFeatures{ formatFeatures_ }
      , samplerYcbcrConversionComponents{ samplerYcbcrConversionComponents_ }
      , suggestedYcbcrModel{ suggestedYcbcrModel_ }
      , suggestedYcbcrRange{ suggestedYcbcrRange_ }
      , suggestedXChromaOffset{ suggestedXChromaOffset_ }
      , suggestedYChromaOffset{ suggestedYChromaOffset_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ScreenBufferFormatPropertiesQNX( ScreenBufferFormatPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpreting it as the wrapper; relies on the two
    // types being layout-compatible (same members, same order).
    ScreenBufferFormatPropertiesQNX( VkScreenBufferFormatPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT
      : ScreenBufferFormatPropertiesQNX( *reinterpret_cast<ScreenBufferFormatPropertiesQNX const *>( &rhs ) )
    {
    }

    ScreenBufferFormatPropertiesQNX & operator=( ScreenBufferFormatPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reliance as the converting constructor).
    ScreenBufferFormatPropertiesQNX & operator=( VkScreenBufferFormatPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ScreenBufferFormatPropertiesQNX const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type, by reference and by pointer, so the wrapper can
    // be handed directly to the C API.
    operator VkScreenBufferFormatPropertiesQNX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkScreenBufferFormatPropertiesQNX *>( this );
    }

    operator VkScreenBufferFormatPropertiesQNX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkScreenBufferFormatPropertiesQNX *>( this );
    }

    operator VkScreenBufferFormatPropertiesQNX const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkScreenBufferFormatPropertiesQNX *>( this );
    }

    operator VkScreenBufferFormatPropertiesQNX *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkScreenBufferFormatPropertiesQNX *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns every member (including sType and pNext) as a tuple of const references, for generic code.
    std::tuple<StructureType const &,
               void * const &,
               Format const &,
               uint64_t const &,
               uint64_t const &,
               FormatFeatureFlags const &,
               ComponentMapping const &,
               SamplerYcbcrModelConversion const &,
               SamplerYcbcrRange const &,
               ChromaLocation const &,
               ChromaLocation const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       format,
                       externalFormat,
                       screenUsage,
                       formatFeatures,
                       samplerYcbcrConversionComponents,
                       suggestedYcbcrModel,
                       suggestedYcbcrRange,
                       suggestedXChromaOffset,
                       suggestedYChromaOffset );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ScreenBufferFormatPropertiesQNX const & ) const = default;
#  else
    // Member-wise equality; note pNext is compared as a raw pointer, not deeply.
    bool operator==( ScreenBufferFormatPropertiesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( externalFormat == rhs.externalFormat ) &&
             ( screenUsage == rhs.screenUsage ) && ( formatFeatures == rhs.formatFeatures ) &&
             ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) && ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) &&
             ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) && ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) &&
             ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
#    endif
    }

    bool operator!=( ScreenBufferFormatPropertiesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    StructureType               sType                            = StructureType::eScreenBufferFormatPropertiesQNX;
    void *                      pNext                            = {};
    Format                      format                           = Format::eUndefined;
    uint64_t                    externalFormat                   = {};
    uint64_t                    screenUsage                      = {};
    FormatFeatureFlags          formatFeatures                   = {};
    ComponentMapping            samplerYcbcrConversionComponents = {};
    SamplerYcbcrModelConversion suggestedYcbcrModel              = SamplerYcbcrModelConversion::eRgbIdentity;
    SamplerYcbcrRange           suggestedYcbcrRange              = SamplerYcbcrRange::eItuFull;
    ChromaLocation              suggestedXChromaOffset           = ChromaLocation::eCositedEven;
    ChromaLocation              suggestedYChromaOffset           = ChromaLocation::eCositedEven;
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper (C++20 and up only).
  template <>
  struct CppType<VkScreenBufferFormatPropertiesQNX>
  {
    using Type = ScreenBufferFormatPropertiesQNX;
  };
#  endif

  // Maps the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eScreenBufferFormatPropertiesQNX>
  {
    using Type = ScreenBufferFormatPropertiesQNX;
  };
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/

#if defined( VK_USE_PLATFORM_SCREEN_QNX )
  // wrapper struct for struct VkScreenBufferPropertiesQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkScreenBufferPropertiesQNX.html
  //
  // Note: no member setters are generated for this struct, and pNext is a non-const void * —
  // consistent with a structure filled in by the implementation and returned to the application
  // (TODO confirm against the registry's returnedonly attribute).
  struct ScreenBufferPropertiesQNX
  {
    using NativeType = VkScreenBufferPropertiesQNX;

    // structureType: the VkStructureType value this wrapper's sType must carry.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eScreenBufferPropertiesQNX;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: all members defaultable; pNext last so the data members come first.
    VULKAN_HPP_CONSTEXPR
      ScreenBufferPropertiesQNX( DeviceSize allocationSize_ = {}, uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , allocationSize{ allocationSize_ }
      , memoryTypeBits{ memoryTypeBits_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ScreenBufferPropertiesQNX( ScreenBufferPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct via reinterpret_cast; relies on layout compatibility.
    ScreenBufferPropertiesQNX( VkScreenBufferPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT
      : ScreenBufferPropertiesQNX( *reinterpret_cast<ScreenBufferPropertiesQNX const *>( &rhs ) )
    {
    }

    ScreenBufferPropertiesQNX & operator=( ScreenBufferPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reliance).
    ScreenBufferPropertiesQNX & operator=( VkScreenBufferPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ScreenBufferPropertiesQNX const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type (reference and pointer) for direct use with the C API.
    operator VkScreenBufferPropertiesQNX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkScreenBufferPropertiesQNX *>( this );
    }

    operator VkScreenBufferPropertiesQNX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkScreenBufferPropertiesQNX *>( this );
    }

    operator VkScreenBufferPropertiesQNX const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkScreenBufferPropertiesQNX *>( this );
    }

    operator VkScreenBufferPropertiesQNX *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkScreenBufferPropertiesQNX *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns every member (including sType and pNext) as a tuple of const references.
    std::tuple<StructureType const &, void * const &, DeviceSize const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, allocationSize, memoryTypeBits );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ScreenBufferPropertiesQNX const & ) const = default;
#  else
    // Member-wise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( ScreenBufferPropertiesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && ( memoryTypeBits == rhs.memoryTypeBits );
#    endif
    }

    bool operator!=( ScreenBufferPropertiesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    StructureType sType          = StructureType::eScreenBufferPropertiesQNX;
    void *        pNext          = {};
    DeviceSize    allocationSize = {};
    uint32_t      memoryTypeBits = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper (C++20 and up only).
  template <>
  struct CppType<VkScreenBufferPropertiesQNX>
  {
    using Type = ScreenBufferPropertiesQNX;
  };
#  endif

  // Maps the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eScreenBufferPropertiesQNX>
  {
    using Type = ScreenBufferPropertiesQNX;
  };
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/

#if defined( VK_USE_PLATFORM_SCREEN_QNX )
  // wrapper struct for struct VkScreenSurfaceCreateInfoQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkScreenSurfaceCreateInfoQNX.html
  //
  // Input structure (const void * pNext, chainable setters) describing the QNX Screen context and
  // window from which to create a VkSurfaceKHR.
  struct ScreenSurfaceCreateInfoQNX
  {
    using NativeType = VkScreenSurfaceCreateInfoQNX;

    // structureType: the VkStructureType value this wrapper's sType must carry.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eScreenSurfaceCreateInfoQNX;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: all members defaultable; pNext last so data members can be given positionally.
    VULKAN_HPP_CONSTEXPR ScreenSurfaceCreateInfoQNX( ScreenSurfaceCreateFlagsQNX flags_   = {},
                                                     struct _screen_context *    context_ = {},
                                                     struct _screen_window *     window_  = {},
                                                     const void *                pNext_   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , context{ context_ }
      , window{ window_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ScreenSurfaceCreateInfoQNX( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct via reinterpret_cast; relies on layout compatibility.
    ScreenSurfaceCreateInfoQNX( VkScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT
      : ScreenSurfaceCreateInfoQNX( *reinterpret_cast<ScreenSurfaceCreateInfoQNX const *>( &rhs ) )
    {
    }

    ScreenSurfaceCreateInfoQNX & operator=( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reliance).
    ScreenSurfaceCreateInfoQNX & operator=( VkScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ScreenSurfaceCreateInfoQNX const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (each returns *this for fluent construction).
    VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setFlags( ScreenSurfaceCreateFlagsQNX flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setContext( struct _screen_context * context_ ) VULKAN_HPP_NOEXCEPT
    {
      context = context_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setWindow( struct _screen_window * window_ ) VULKAN_HPP_NOEXCEPT
    {
      window = window_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer) for direct use with the C API.
    operator VkScreenSurfaceCreateInfoQNX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( this );
    }

    operator VkScreenSurfaceCreateInfoQNX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkScreenSurfaceCreateInfoQNX *>( this );
    }

    operator VkScreenSurfaceCreateInfoQNX const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( this );
    }

    operator VkScreenSurfaceCreateInfoQNX *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkScreenSurfaceCreateInfoQNX *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns every member (including sType and pNext) as a tuple of const references.
    std::
      tuple<StructureType const &, const void * const &, ScreenSurfaceCreateFlagsQNX const &, struct _screen_context * const &, struct _screen_window * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, context, window );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ScreenSurfaceCreateInfoQNX const & ) const = default;
#  else
    // Member-wise equality; pNext, context and window are compared as raw pointers.
    bool operator==( ScreenSurfaceCreateInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( context == rhs.context ) && ( window == rhs.window );
#    endif
    }

    bool operator!=( ScreenSurfaceCreateInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    StructureType               sType   = StructureType::eScreenSurfaceCreateInfoQNX;
    const void *                pNext   = {};
    ScreenSurfaceCreateFlagsQNX flags   = {};
    struct _screen_context *    context = {};
    struct _screen_window *     window  = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper (C++20 and up only).
  template <>
  struct CppType<VkScreenSurfaceCreateInfoQNX>
  {
    using Type = ScreenSurfaceCreateInfoQNX;
  };
#  endif

  // Maps the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eScreenSurfaceCreateInfoQNX>
  {
    using Type = ScreenSurfaceCreateInfoQNX;
  };
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/

  // wrapper struct for struct VkSemaphoreCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreCreateInfo.html
  //
  // Input structure for vkCreateSemaphore; flags is currently a plain flags field and extensions
  // (e.g. timeline semaphores) are attached via the pNext chain.
  struct SemaphoreCreateInfo
  {
    using NativeType = VkSemaphoreCreateInfo;

    // structureType: the VkStructureType value this wrapper's sType must carry.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSemaphoreCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: all members defaultable; pNext last.
    VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( SemaphoreCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct via reinterpret_cast; relies on layout compatibility.
    SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreCreateInfo( *reinterpret_cast<SemaphoreCreateInfo const *>( &rhs ) )
    {
    }

    SemaphoreCreateInfo & operator=( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reliance).
    SemaphoreCreateInfo & operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SemaphoreCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (each returns *this for fluent construction).
    VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo & setFlags( SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer) for direct use with the C API.
    operator VkSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSemaphoreCreateInfo *>( this );
    }

    operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSemaphoreCreateInfo *>( this );
    }

    operator VkSemaphoreCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSemaphoreCreateInfo *>( this );
    }

    operator VkSemaphoreCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSemaphoreCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns every member (including sType and pNext) as a tuple of const references.
    std::tuple<StructureType const &, const void * const &, SemaphoreCreateFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SemaphoreCreateInfo const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
#  endif
    }

    bool operator!=( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType        sType = StructureType::eSemaphoreCreateInfo;
    const void *         pNext = {};
    SemaphoreCreateFlags flags = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper (C++20 and up only).
  template <>
  struct CppType<VkSemaphoreCreateInfo>
  {
    using Type = SemaphoreCreateInfo;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eSemaphoreCreateInfo>
  {
    using Type = SemaphoreCreateInfo;
  };

  // wrapper struct for struct VkSemaphoreGetFdInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreGetFdInfoKHR.html
  //
  // Input structure for vkGetSemaphoreFdKHR: names the semaphore and the external handle type to
  // export as a POSIX file descriptor.
  struct SemaphoreGetFdInfoKHR
  {
    using NativeType = VkSemaphoreGetFdInfoKHR;

    // structureType: the VkStructureType value this wrapper's sType must carry.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSemaphoreGetFdInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: all members defaultable; pNext last.
    VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( Semaphore                           semaphore_  = {},
                                                ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
                                                const void *                        pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , semaphore{ semaphore_ }
      , handleType{ handleType_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct via reinterpret_cast; relies on layout compatibility.
    SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SemaphoreGetFdInfoKHR( *reinterpret_cast<SemaphoreGetFdInfoKHR const *>( &rhs ) )
    {
    }

    SemaphoreGetFdInfoKHR & operator=( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reliance).
    SemaphoreGetFdInfoKHR & operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SemaphoreGetFdInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (each returns *this for fluent construction).
    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setSemaphore( Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphore = semaphore_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer) for direct use with the C API.
    operator VkSemaphoreGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( this );
    }

    operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSemaphoreGetFdInfoKHR *>( this );
    }

    operator VkSemaphoreGetFdInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( this );
    }

    operator VkSemaphoreGetFdInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSemaphoreGetFdInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns every member (including sType and pNext) as a tuple of const references.
    std::tuple<StructureType const &, const void * const &, Semaphore const &, ExternalSemaphoreHandleTypeFlagBits const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, semaphore, handleType );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SemaphoreGetFdInfoKHR const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( handleType == rhs.handleType );
#  endif
    }

    bool operator!=( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                       sType      = StructureType::eSemaphoreGetFdInfoKHR;
    const void *                        pNext      = {};
    Semaphore                           semaphore  = {};
    ExternalSemaphoreHandleTypeFlagBits handleType = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper (C++20 and up only).
  template <>
  struct CppType<VkSemaphoreGetFdInfoKHR>
  {
    using Type = SemaphoreGetFdInfoKHR;
  };
#endif

  // Maps the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eSemaphoreGetFdInfoKHR>
  {
    using Type = SemaphoreGetFdInfoKHR;
  };

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  // wrapper struct for struct VkSemaphoreGetWin32HandleInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreGetWin32HandleInfoKHR.html
  //
  // Input structure for vkGetSemaphoreWin32HandleKHR: names the semaphore and the external handle
  // type to export as a Win32 HANDLE.
  struct SemaphoreGetWin32HandleInfoKHR
  {
    using NativeType = VkSemaphoreGetWin32HandleInfoKHR;

    // structureType: the VkStructureType value this wrapper's sType must carry.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSemaphoreGetWin32HandleInfoKHR;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor. NOTE(review): the generated default handleType is eOpaqueFd (the enum's
    // first value) even though this is a Win32 struct — callers must set a Win32 handle type
    // explicitly for valid usage.
    VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( Semaphore                           semaphore_  = {},
                                                         ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
                                                         const void *                        pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , semaphore{ semaphore_ }
      , handleType{ handleType_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct via reinterpret_cast; relies on layout compatibility.
    SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SemaphoreGetWin32HandleInfoKHR( *reinterpret_cast<SemaphoreGetWin32HandleInfoKHR const *>( &rhs ) )
    {
    }

    SemaphoreGetWin32HandleInfoKHR & operator=( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reliance).
    SemaphoreGetWin32HandleInfoKHR & operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SemaphoreGetWin32HandleInfoKHR const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (each returns *this for fluent construction).
    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & setSemaphore( Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphore = semaphore_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer) for direct use with the C API.
    operator VkSemaphoreGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( this );
    }

    operator VkSemaphoreGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSemaphoreGetWin32HandleInfoKHR *>( this );
    }

    operator VkSemaphoreGetWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( this );
    }

    operator VkSemaphoreGetWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSemaphoreGetWin32HandleInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns every member (including sType and pNext) as a tuple of const references.
    std::tuple<StructureType const &, const void * const &, Semaphore const &, ExternalSemaphoreHandleTypeFlagBits const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, semaphore, handleType );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SemaphoreGetWin32HandleInfoKHR const & ) const = default;
#  else
    // Member-wise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( handleType == rhs.handleType );
#    endif
    }

    bool operator!=( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    StructureType                       sType      = StructureType::eSemaphoreGetWin32HandleInfoKHR;
    const void *                        pNext      = {};
    Semaphore                           semaphore  = {};
    ExternalSemaphoreHandleTypeFlagBits handleType = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper (C++20 and up only).
  template <>
  struct CppType<VkSemaphoreGetWin32HandleInfoKHR>
  {
    using Type = SemaphoreGetWin32HandleInfoKHR;
  };
#  endif

  // Maps the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eSemaphoreGetWin32HandleInfoKHR>
  {
    using Type = SemaphoreGetWin32HandleInfoKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

#if defined( VK_USE_PLATFORM_FUCHSIA )
  // wrapper struct for struct VkSemaphoreGetZirconHandleInfoFUCHSIA, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreGetZirconHandleInfoFUCHSIA.html
  //
  // Input structure for vkGetSemaphoreZirconHandleFUCHSIA: names the semaphore and the external
  // handle type to export as a Zircon event handle.
  struct SemaphoreGetZirconHandleInfoFUCHSIA
  {
    using NativeType = VkSemaphoreGetZirconHandleInfoFUCHSIA;

    // structureType: the VkStructureType value this wrapper's sType must carry.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor. NOTE(review): the generated default handleType is eOpaqueFd (the enum's
    // first value) even though this is a Fuchsia struct — callers must set a Zircon handle type
    // explicitly for valid usage.
    VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA( Semaphore                           semaphore_  = {},
                                                              ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
                                                              const void *                        pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , semaphore{ semaphore_ }
      , handleType{ handleType_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct via reinterpret_cast; relies on layout compatibility.
    SemaphoreGetZirconHandleInfoFUCHSIA( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
      : SemaphoreGetZirconHandleInfoFUCHSIA( *reinterpret_cast<SemaphoreGetZirconHandleInfoFUCHSIA const *>( &rhs ) )
    {
    }

    SemaphoreGetZirconHandleInfoFUCHSIA & operator=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reliance).
    SemaphoreGetZirconHandleInfoFUCHSIA & operator=( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SemaphoreGetZirconHandleInfoFUCHSIA const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (each returns *this for fluent construction).
    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & setSemaphore( Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphore = semaphore_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (reference and pointer) for direct use with the C API.
    operator VkSemaphoreGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( this );
    }

    operator VkSemaphoreGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSemaphoreGetZirconHandleInfoFUCHSIA *>( this );
    }

    operator VkSemaphoreGetZirconHandleInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( this );
    }

    operator VkSemaphoreGetZirconHandleInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSemaphoreGetZirconHandleInfoFUCHSIA *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns every member (including sType and pNext) as a tuple of const references.
    std::tuple<StructureType const &, const void * const &, Semaphore const &, ExternalSemaphoreHandleTypeFlagBits const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, semaphore, handleType );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SemaphoreGetZirconHandleInfoFUCHSIA const & ) const = default;
#  else
    // Member-wise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( handleType == rhs.handleType );
#    endif
    }

    bool operator!=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    StructureType                       sType      = StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA;
    const void *                        pNext      = {};
    Semaphore                           semaphore  = {};
    ExternalSemaphoreHandleTypeFlagBits handleType = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper (C++20 and up only).
  template <>
  struct CppType<VkSemaphoreGetZirconHandleInfoFUCHSIA>
  {
    using Type = SemaphoreGetZirconHandleInfoFUCHSIA;
  };
#  endif

  // Maps the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA>
  {
    using Type = SemaphoreGetZirconHandleInfoFUCHSIA;
  };
#endif /*VK_USE_PLATFORM_FUCHSIA*/

  // wrapper struct for struct VkSemaphoreSignalInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreSignalInfo.html
  struct SemaphoreSignalInfo
  {
    using NativeType = VkSemaphoreSignalInfo;

    // NOTE(review): presumably whether this struct may appear more than once in a structure chain — confirm against StructureChain
    static const bool                                  allowDuplicate = false;
    // sType value identifying this structure in a pNext chain
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSemaphoreSignalInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; pNext comes last so the commonly-set members can be passed positionally
    VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( Semaphore semaphore_ = {}, uint64_t value_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , semaphore{ semaphore_ }
      , value{ value_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // conversion from the C struct; valid because this wrapper is layout-compatible with VkSemaphoreSignalInfo
    SemaphoreSignalInfo( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreSignalInfo( *reinterpret_cast<SemaphoreSignalInfo const *>( &rhs ) )
    {
    }

    SemaphoreSignalInfo & operator=( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assignment from the C struct, again relying on layout compatibility
    SemaphoreSignalInfo & operator=( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SemaphoreSignalInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters: each returns *this so calls can be fluently combined
    VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setSemaphore( Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphore = semaphore_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
    {
      value = value_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // reinterpret-based conversions to the C struct (by reference and by pointer)
    operator VkSemaphoreSignalInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSemaphoreSignalInfo *>( this );
    }

    operator VkSemaphoreSignalInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSemaphoreSignalInfo *>( this );
    }

    operator VkSemaphoreSignalInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSemaphoreSignalInfo *>( this );
    }

    operator VkSemaphoreSignalInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSemaphoreSignalInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members, in declaration order; used for reflection-based comparison below
    std::tuple<StructureType const &, const void * const &, Semaphore const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, semaphore, value );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SemaphoreSignalInfo const & ) const = default;
#else
    // member-wise equality fallback when operator<=> is unavailable
    bool operator==( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( value == rhs.value );
#  endif
    }

    bool operator!=( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members mirror VkSemaphoreSignalInfo in declaration order (required for the reinterpret_cast conversions above)
    StructureType sType     = StructureType::eSemaphoreSignalInfo;
    const void *  pNext     = {};
    Semaphore     semaphore = {};
    uint64_t      value     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type to its C++ wrapper type (C++20 and later only)
  template <>
  struct CppType<VkSemaphoreSignalInfo>
  {
    using Type = SemaphoreSignalInfo;
  };
#endif

  // maps the StructureType enumerant to the wrapper type
  template <>
  struct CppType<StructureType, StructureType::eSemaphoreSignalInfo>
  {
    using Type = SemaphoreSignalInfo;
  };

  // KHR alias kept for source compatibility with code written against the extension name
  using SemaphoreSignalInfoKHR = SemaphoreSignalInfo;

  // wrapper struct for struct VkSemaphoreTypeCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreTypeCreateInfo.html
  struct SemaphoreTypeCreateInfo
  {
    using NativeType = VkSemaphoreTypeCreateInfo;

    // NOTE(review): presumably whether this struct may appear more than once in a structure chain — confirm against StructureChain
    static const bool                                  allowDuplicate = false;
    // sType value identifying this structure in a pNext chain
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSemaphoreTypeCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; defaults to a binary semaphore with initial value 0
    VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( SemaphoreType semaphoreType_ = SemaphoreType::eBinary,
                                                  uint64_t      initialValue_  = {},
                                                  const void *  pNext_         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , semaphoreType{ semaphoreType_ }
      , initialValue{ initialValue_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // conversion from the C struct; valid because this wrapper is layout-compatible with VkSemaphoreTypeCreateInfo
    SemaphoreTypeCreateInfo( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SemaphoreTypeCreateInfo( *reinterpret_cast<SemaphoreTypeCreateInfo const *>( &rhs ) )
    {
    }

    SemaphoreTypeCreateInfo & operator=( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assignment from the C struct, again relying on layout compatibility
    SemaphoreTypeCreateInfo & operator=( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SemaphoreTypeCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters: each returns *this so calls can be fluently combined
    VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setSemaphoreType( SemaphoreType semaphoreType_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphoreType = semaphoreType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setInitialValue( uint64_t initialValue_ ) VULKAN_HPP_NOEXCEPT
    {
      initialValue = initialValue_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // reinterpret-based conversions to the C struct (by reference and by pointer)
    operator VkSemaphoreTypeCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSemaphoreTypeCreateInfo *>( this );
    }

    operator VkSemaphoreTypeCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSemaphoreTypeCreateInfo *>( this );
    }

    operator VkSemaphoreTypeCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSemaphoreTypeCreateInfo *>( this );
    }

    operator VkSemaphoreTypeCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSemaphoreTypeCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members, in declaration order; used for reflection-based comparison below
    std::tuple<StructureType const &, const void * const &, SemaphoreType const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, semaphoreType, initialValue );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SemaphoreTypeCreateInfo const & ) const = default;
#else
    // member-wise equality fallback when operator<=> is unavailable
    bool operator==( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphoreType == rhs.semaphoreType ) && ( initialValue == rhs.initialValue );
#  endif
    }

    bool operator!=( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members mirror VkSemaphoreTypeCreateInfo in declaration order (required for the reinterpret_cast conversions above)
    StructureType sType         = StructureType::eSemaphoreTypeCreateInfo;
    const void *  pNext         = {};
    SemaphoreType semaphoreType = SemaphoreType::eBinary;
    uint64_t      initialValue  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type to its C++ wrapper type (C++20 and later only)
  template <>
  struct CppType<VkSemaphoreTypeCreateInfo>
  {
    using Type = SemaphoreTypeCreateInfo;
  };
#endif

  // maps the StructureType enumerant to the wrapper type
  template <>
  struct CppType<StructureType, StructureType::eSemaphoreTypeCreateInfo>
  {
    using Type = SemaphoreTypeCreateInfo;
  };

  // KHR alias kept for source compatibility with code written against the extension name
  using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo;

  // wrapper struct for struct VkSemaphoreWaitInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreWaitInfo.html
  struct SemaphoreWaitInfo
  {
    using NativeType = VkSemaphoreWaitInfo;

    // NOTE(review): presumably whether this struct may appear more than once in a structure chain — confirm against StructureChain
    static const bool                                  allowDuplicate = false;
    // sType value identifying this structure in a pNext chain
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSemaphoreWaitInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; pSemaphores and pValues are parallel arrays of semaphoreCount elements
    VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( SemaphoreWaitFlags flags_          = {},
                                            uint32_t           semaphoreCount_ = {},
                                            const Semaphore *  pSemaphores_    = {},
                                            const uint64_t *   pValues_        = {},
                                            const void *       pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , semaphoreCount{ semaphoreCount_ }
      , pSemaphores{ pSemaphores_ }
      , pValues{ pValues_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // conversion from the C struct; valid because this wrapper is layout-compatible with VkSemaphoreWaitInfo
    SemaphoreWaitInfo( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreWaitInfo( *reinterpret_cast<SemaphoreWaitInfo const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // enhanced-mode constructor: derives semaphoreCount from the semaphores array and validates that both
    // arrays have the same length (assert when exceptions are disabled, LogicError otherwise).
    // NOTE: the struct only stores the pointers — the caller must keep the backing arrays alive.
    SemaphoreWaitInfo( SemaphoreWaitFlags                               flags_,
                       ArrayProxyNoTemporaries<const Semaphore> const & semaphores_,
                       ArrayProxyNoTemporaries<const uint64_t> const &  values_ = {},
                       const void *                                     pNext_  = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , semaphoreCount( static_cast<uint32_t>( semaphores_.size() ) )
      , pSemaphores( semaphores_.data() )
      , pValues( values_.data() )
    {
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( semaphores_.size() == values_.size() );
#    else
      if ( semaphores_.size() != values_.size() )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SemaphoreWaitInfo::SemaphoreWaitInfo: semaphores_.size() != values_.size()" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SemaphoreWaitInfo & operator=( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assignment from the C struct, again relying on layout compatibility
    SemaphoreWaitInfo & operator=( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SemaphoreWaitInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters: each returns *this so calls can be fluently combined
    VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setFlags( SemaphoreWaitFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphoreCount = semaphoreCount;
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setSemaphoreCount( uint32_t semaphoreCount_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphoreCount = semaphoreCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPSemaphores( const Semaphore * pSemaphores_ ) VULKAN_HPP_NOEXCEPT
    {
      pSemaphores = pSemaphores_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // array setter: overwrites semaphoreCount with the array's size
    SemaphoreWaitInfo & setSemaphores( ArrayProxyNoTemporaries<const Semaphore> const & semaphores_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphoreCount = static_cast<uint32_t>( semaphores_.size() );
      pSemaphores    = semaphores_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPValues( const uint64_t * pValues_ ) VULKAN_HPP_NOEXCEPT
    {
      pValues = pValues_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // array setter: also overwrites semaphoreCount, since both arrays must share one length
    SemaphoreWaitInfo & setValues( ArrayProxyNoTemporaries<const uint64_t> const & values_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphoreCount = static_cast<uint32_t>( values_.size() );
      pValues        = values_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // reinterpret-based conversions to the C struct (by reference and by pointer)
    operator VkSemaphoreWaitInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSemaphoreWaitInfo *>( this );
    }

    operator VkSemaphoreWaitInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSemaphoreWaitInfo *>( this );
    }

    operator VkSemaphoreWaitInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSemaphoreWaitInfo *>( this );
    }

    operator VkSemaphoreWaitInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSemaphoreWaitInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members, in declaration order; used for reflection-based comparison below
    std::tuple<StructureType const &, const void * const &, SemaphoreWaitFlags const &, uint32_t const &, const Semaphore * const &, const uint64_t * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, semaphoreCount, pSemaphores, pValues );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SemaphoreWaitInfo const & ) const = default;
#else
    // member-wise equality fallback; note pointer members compare by address, not by pointee contents
    bool operator==( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( semaphoreCount == rhs.semaphoreCount ) &&
             ( pSemaphores == rhs.pSemaphores ) && ( pValues == rhs.pValues );
#  endif
    }

    bool operator!=( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members mirror VkSemaphoreWaitInfo in declaration order (required for the reinterpret_cast conversions above)
    StructureType      sType          = StructureType::eSemaphoreWaitInfo;
    const void *       pNext          = {};
    SemaphoreWaitFlags flags          = {};
    uint32_t           semaphoreCount = {};
    const Semaphore *  pSemaphores    = {};
    const uint64_t *   pValues        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type to its C++ wrapper type (C++20 and later only)
  template <>
  struct CppType<VkSemaphoreWaitInfo>
  {
    using Type = SemaphoreWaitInfo;
  };
#endif

  // maps the StructureType enumerant to the wrapper type
  template <>
  struct CppType<StructureType, StructureType::eSemaphoreWaitInfo>
  {
    using Type = SemaphoreWaitInfo;
  };

  // KHR alias kept for source compatibility with code written against the extension name
  using SemaphoreWaitInfoKHR = SemaphoreWaitInfo;

  // wrapper struct for struct VkSetDescriptorBufferOffsetsInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSetDescriptorBufferOffsetsInfoEXT.html
  struct SetDescriptorBufferOffsetsInfoEXT
  {
    using NativeType = VkSetDescriptorBufferOffsetsInfoEXT;

    // NOTE(review): presumably whether this struct may appear more than once in a structure chain — confirm against StructureChain
    static const bool                                  allowDuplicate = false;
    // sType value identifying this structure in a pNext chain
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSetDescriptorBufferOffsetsInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; pBufferIndices and pOffsets are parallel arrays of setCount elements
    VULKAN_HPP_CONSTEXPR SetDescriptorBufferOffsetsInfoEXT( ShaderStageFlags   stageFlags_     = {},
                                                            PipelineLayout     layout_         = {},
                                                            uint32_t           firstSet_       = {},
                                                            uint32_t           setCount_       = {},
                                                            const uint32_t *   pBufferIndices_ = {},
                                                            const DeviceSize * pOffsets_       = {},
                                                            const void *       pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stageFlags{ stageFlags_ }
      , layout{ layout_ }
      , firstSet{ firstSet_ }
      , setCount{ setCount_ }
      , pBufferIndices{ pBufferIndices_ }
      , pOffsets{ pOffsets_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SetDescriptorBufferOffsetsInfoEXT( SetDescriptorBufferOffsetsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // conversion from the C struct; valid because this wrapper is layout-compatible with VkSetDescriptorBufferOffsetsInfoEXT
    SetDescriptorBufferOffsetsInfoEXT( VkSetDescriptorBufferOffsetsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SetDescriptorBufferOffsetsInfoEXT( *reinterpret_cast<SetDescriptorBufferOffsetsInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // enhanced-mode constructor: derives setCount from the bufferIndices array and validates that both
    // arrays have the same length (assert when exceptions are disabled, LogicError otherwise).
    // NOTE: the struct only stores the pointers — the caller must keep the backing arrays alive.
    SetDescriptorBufferOffsetsInfoEXT( ShaderStageFlags                                  stageFlags_,
                                       PipelineLayout                                    layout_,
                                       uint32_t                                          firstSet_,
                                       ArrayProxyNoTemporaries<const uint32_t> const &   bufferIndices_,
                                       ArrayProxyNoTemporaries<const DeviceSize> const & offsets_ = {},
                                       const void *                                      pNext_   = nullptr )
      : pNext( pNext_ )
      , stageFlags( stageFlags_ )
      , layout( layout_ )
      , firstSet( firstSet_ )
      , setCount( static_cast<uint32_t>( bufferIndices_.size() ) )
      , pBufferIndices( bufferIndices_.data() )
      , pOffsets( offsets_.data() )
    {
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( bufferIndices_.size() == offsets_.size() );
#    else
      if ( bufferIndices_.size() != offsets_.size() )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                          "::SetDescriptorBufferOffsetsInfoEXT::SetDescriptorBufferOffsetsInfoEXT: bufferIndices_.size() != offsets_.size()" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SetDescriptorBufferOffsetsInfoEXT & operator=( SetDescriptorBufferOffsetsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assignment from the C struct, again relying on layout compatibility
    SetDescriptorBufferOffsetsInfoEXT & operator=( VkSetDescriptorBufferOffsetsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SetDescriptorBufferOffsetsInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters: each returns *this so calls can be fluently combined
    VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setStageFlags( ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      stageFlags = stageFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setLayout( PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
    {
      layout = layout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setFirstSet( uint32_t firstSet_ ) VULKAN_HPP_NOEXCEPT
    {
      firstSet = firstSet_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setSetCount( uint32_t setCount_ ) VULKAN_HPP_NOEXCEPT
    {
      setCount = setCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setPBufferIndices( const uint32_t * pBufferIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pBufferIndices = pBufferIndices_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // array setter: overwrites setCount with the array's size
    SetDescriptorBufferOffsetsInfoEXT & setBufferIndices( ArrayProxyNoTemporaries<const uint32_t> const & bufferIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      setCount       = static_cast<uint32_t>( bufferIndices_.size() );
      pBufferIndices = bufferIndices_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setPOffsets( const DeviceSize * pOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      pOffsets = pOffsets_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // array setter: also overwrites setCount, since both arrays must share one length
    SetDescriptorBufferOffsetsInfoEXT & setOffsets( ArrayProxyNoTemporaries<const DeviceSize> const & offsets_ ) VULKAN_HPP_NOEXCEPT
    {
      setCount = static_cast<uint32_t>( offsets_.size() );
      pOffsets = offsets_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // reinterpret-based conversions to the C struct (by reference and by pointer)
    operator VkSetDescriptorBufferOffsetsInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSetDescriptorBufferOffsetsInfoEXT *>( this );
    }

    operator VkSetDescriptorBufferOffsetsInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSetDescriptorBufferOffsetsInfoEXT *>( this );
    }

    operator VkSetDescriptorBufferOffsetsInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSetDescriptorBufferOffsetsInfoEXT *>( this );
    }

    operator VkSetDescriptorBufferOffsetsInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSetDescriptorBufferOffsetsInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members, in declaration order; used for reflection-based comparison below
    std::tuple<StructureType const &,
               const void * const &,
               ShaderStageFlags const &,
               PipelineLayout const &,
               uint32_t const &,
               uint32_t const &,
               const uint32_t * const &,
               const DeviceSize * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stageFlags, layout, firstSet, setCount, pBufferIndices, pOffsets );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SetDescriptorBufferOffsetsInfoEXT const & ) const = default;
#else
    // member-wise equality fallback; note pointer members compare by address, not by pointee contents
    bool operator==( SetDescriptorBufferOffsetsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageFlags == rhs.stageFlags ) && ( layout == rhs.layout ) && ( firstSet == rhs.firstSet ) &&
             ( setCount == rhs.setCount ) && ( pBufferIndices == rhs.pBufferIndices ) && ( pOffsets == rhs.pOffsets );
#  endif
    }

    bool operator!=( SetDescriptorBufferOffsetsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members mirror VkSetDescriptorBufferOffsetsInfoEXT in declaration order (required for the reinterpret_cast conversions above)
    StructureType      sType          = StructureType::eSetDescriptorBufferOffsetsInfoEXT;
    const void *       pNext          = {};
    ShaderStageFlags   stageFlags     = {};
    PipelineLayout     layout         = {};
    uint32_t           firstSet       = {};
    uint32_t           setCount       = {};
    const uint32_t *   pBufferIndices = {};
    const DeviceSize * pOffsets       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type to its C++ wrapper type (C++20 and later only)
  template <>
  struct CppType<VkSetDescriptorBufferOffsetsInfoEXT>
  {
    using Type = SetDescriptorBufferOffsetsInfoEXT;
  };
#endif

  // maps the StructureType enumerant to the wrapper type
  template <>
  struct CppType<StructureType, StructureType::eSetDescriptorBufferOffsetsInfoEXT>
  {
    using Type = SetDescriptorBufferOffsetsInfoEXT;
  };

  // wrapper struct for struct VkSetLatencyMarkerInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSetLatencyMarkerInfoNV.html
  struct SetLatencyMarkerInfoNV
  {
    using NativeType = VkSetLatencyMarkerInfoNV;

    // NOTE(review): presumably whether this struct may appear more than once in a structure chain — confirm against StructureChain
    static const bool                                  allowDuplicate = false;
    // sType value identifying this structure in a pNext chain
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSetLatencyMarkerInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; marker defaults to the first enumerant, eSimulationStart
    VULKAN_HPP_CONSTEXPR SetLatencyMarkerInfoNV( uint64_t        presentID_ = {},
                                                 LatencyMarkerNV marker_    = LatencyMarkerNV::eSimulationStart,
                                                 const void *    pNext_     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , presentID{ presentID_ }
      , marker{ marker_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SetLatencyMarkerInfoNV( SetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // conversion from the C struct; valid because this wrapper is layout-compatible with VkSetLatencyMarkerInfoNV
    SetLatencyMarkerInfoNV( VkSetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : SetLatencyMarkerInfoNV( *reinterpret_cast<SetLatencyMarkerInfoNV const *>( &rhs ) )
    {
    }

    SetLatencyMarkerInfoNV & operator=( SetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assignment from the C struct, again relying on layout compatibility
    SetLatencyMarkerInfoNV & operator=( VkSetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SetLatencyMarkerInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters: each returns *this so calls can be fluently combined
    VULKAN_HPP_CONSTEXPR_14 SetLatencyMarkerInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SetLatencyMarkerInfoNV & setPresentID( uint64_t presentID_ ) VULKAN_HPP_NOEXCEPT
    {
      presentID = presentID_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SetLatencyMarkerInfoNV & setMarker( LatencyMarkerNV marker_ ) VULKAN_HPP_NOEXCEPT
    {
      marker = marker_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // reinterpret-based conversions to the C struct (by reference and by pointer)
    operator VkSetLatencyMarkerInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( this );
    }

    operator VkSetLatencyMarkerInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSetLatencyMarkerInfoNV *>( this );
    }

    operator VkSetLatencyMarkerInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( this );
    }

    operator VkSetLatencyMarkerInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSetLatencyMarkerInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members, in declaration order; used for reflection-based comparison below
    std::tuple<StructureType const &, const void * const &, uint64_t const &, LatencyMarkerNV const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, presentID, marker );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SetLatencyMarkerInfoNV const & ) const = default;
#else
    // member-wise equality fallback when operator<=> is unavailable
    bool operator==( SetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentID == rhs.presentID ) && ( marker == rhs.marker );
#  endif
    }

    bool operator!=( SetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // members mirror VkSetLatencyMarkerInfoNV in declaration order (required for the reinterpret_cast conversions above)
    StructureType   sType     = StructureType::eSetLatencyMarkerInfoNV;
    const void *    pNext     = {};
    uint64_t        presentID = {};
    LatencyMarkerNV marker    = LatencyMarkerNV::eSimulationStart;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type to its C++ wrapper type (C++20 and later only)
  template <>
  struct CppType<VkSetLatencyMarkerInfoNV>
  {
    using Type = SetLatencyMarkerInfoNV;
  };
#endif

  // maps the StructureType enumerant to the wrapper type
  template <>
  struct CppType<StructureType, StructureType::eSetLatencyMarkerInfoNV>
  {
    using Type = SetLatencyMarkerInfoNV;
  };

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  // wrapper struct for struct VkSetPresentConfigNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSetPresentConfigNV.html
  struct SetPresentConfigNV
  {
    using NativeType = VkSetPresentConfigNV;

    // NOTE(review): presumably whether this struct may appear more than once in a structure chain — confirm against StructureChain
    static const bool                                  allowDuplicate = false;
    // sType value identifying this structure in a pNext chain
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSetPresentConfigNV;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // member-wise constructor; pNext comes last so the commonly-set members can be passed positionally
    VULKAN_HPP_CONSTEXPR
      SetPresentConfigNV( uint32_t numFramesPerBatch_ = {}, uint32_t presentConfigFeedback_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , numFramesPerBatch{ numFramesPerBatch_ }
      , presentConfigFeedback{ presentConfigFeedback_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SetPresentConfigNV( SetPresentConfigNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // conversion from the C struct; valid because this wrapper is layout-compatible with VkSetPresentConfigNV
    SetPresentConfigNV( VkSetPresentConfigNV const & rhs ) VULKAN_HPP_NOEXCEPT : SetPresentConfigNV( *reinterpret_cast<SetPresentConfigNV const *>( &rhs ) ) {}

    SetPresentConfigNV & operator=( SetPresentConfigNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assignment from the C struct, again relying on layout compatibility
    SetPresentConfigNV & operator=( VkSetPresentConfigNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SetPresentConfigNV const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters: each returns *this so calls can be fluently combined
    VULKAN_HPP_CONSTEXPR_14 SetPresentConfigNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SetPresentConfigNV & setNumFramesPerBatch( uint32_t numFramesPerBatch_ ) VULKAN_HPP_NOEXCEPT
    {
      numFramesPerBatch = numFramesPerBatch_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SetPresentConfigNV & setPresentConfigFeedback( uint32_t presentConfigFeedback_ ) VULKAN_HPP_NOEXCEPT
    {
      presentConfigFeedback = presentConfigFeedback_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // reinterpret-based conversions to the C struct (by reference and by pointer)
    operator VkSetPresentConfigNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSetPresentConfigNV *>( this );
    }

    operator VkSetPresentConfigNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSetPresentConfigNV *>( this );
    }

    operator VkSetPresentConfigNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSetPresentConfigNV *>( this );
    }

    operator VkSetPresentConfigNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSetPresentConfigNV *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // tuple of references to all members, in declaration order; used for reflection-based comparison below
    std::tuple<StructureType const &, const void * const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, numFramesPerBatch, presentConfigFeedback );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SetPresentConfigNV const & ) const = default;
#  else
    // member-wise equality fallback when operator<=> is unavailable
    bool operator==( SetPresentConfigNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( numFramesPerBatch == rhs.numFramesPerBatch ) &&
             ( presentConfigFeedback == rhs.presentConfigFeedback );
#    endif
    }

    bool operator!=( SetPresentConfigNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // members mirror VkSetPresentConfigNV in declaration order (required for the reinterpret_cast conversions above)
    StructureType sType                 = StructureType::eSetPresentConfigNV;
    const void *  pNext                 = {};
    uint32_t      numFramesPerBatch     = {};
    uint32_t      presentConfigFeedback = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type to its C++ wrapper type (C++20 and later only)
  template <>
  struct CppType<VkSetPresentConfigNV>
  {
    using Type = SetPresentConfigNV;
  };
#  endif

  // maps the StructureType enumerant to the wrapper type
  template <>
  struct CppType<StructureType, StructureType::eSetPresentConfigNV>
  {
    using Type = SetPresentConfigNV;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  // wrapper struct for struct VkSetStateFlagsIndirectCommandNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSetStateFlagsIndirectCommandNV.html
  struct SetStateFlagsIndirectCommandNV
  {
    using NativeType = VkSetStateFlagsIndirectCommandNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; the single member defaults to zero
    VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( uint32_t data_ = {} ) VULKAN_HPP_NOEXCEPT : data{ data_ } {}

    VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the equivalent C struct; wrapper and C struct are layout-compatible
    SetStateFlagsIndirectCommandNV( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : SetStateFlagsIndirectCommandNV( *reinterpret_cast<SetStateFlagsIndirectCommandNV const *>( &rhs ) )
    {
    }

    SetStateFlagsIndirectCommandNV & operator=( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the equivalent C struct
    SetStateFlagsIndirectCommandNV & operator=( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      return *this = *reinterpret_cast<SetStateFlagsIndirectCommandNV const *>( &rhs );
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setter for data
    VULKAN_HPP_CONSTEXPR_14 SetStateFlagsIndirectCommandNV & setData( uint32_t data_ ) VULKAN_HPP_NOEXCEPT
    {
      this->data = data_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the C struct: reference and pointer, const and non-const
    operator VkSetStateFlagsIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSetStateFlagsIndirectCommandNV const *>( this );
    }

    operator VkSetStateFlagsIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSetStateFlagsIndirectCommandNV *>( this );
    }

    operator VkSetStateFlagsIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSetStateFlagsIndirectCommandNV const *>( this );
    }

    operator VkSetStateFlagsIndirectCommandNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSetStateFlagsIndirectCommandNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple view over all members, in declaration order
    std::tuple<uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( data );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SetStateFlagsIndirectCommandNV const & ) const = default;
#else
    bool operator==( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return data == rhs.data;
#  endif
    }

    bool operator!=( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    uint32_t data = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the C struct VkSetStateFlagsIndirectCommandNV to its C++ wrapper type
  template <>
  struct CppType<VkSetStateFlagsIndirectCommandNV>
  {
    using Type = SetStateFlagsIndirectCommandNV;
  };
#endif

  // wrapper struct for struct VkShaderCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderCreateInfoEXT.html
  struct ShaderCreateInfoEXT
  {
    using NativeType = VkShaderCreateInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eShaderCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; all members default to zero / nullptr
    VULKAN_HPP_CONSTEXPR ShaderCreateInfoEXT( ShaderCreateFlagsEXT        flags_                  = {},
                                              ShaderStageFlagBits         stage_                  = ShaderStageFlagBits::eVertex,
                                              ShaderStageFlags            nextStage_              = {},
                                              ShaderCodeTypeEXT           codeType_               = ShaderCodeTypeEXT::eBinary,
                                              size_t                      codeSize_               = {},
                                              const void *                pCode_                  = {},
                                              const char *                pName_                  = {},
                                              uint32_t                    setLayoutCount_         = {},
                                              const DescriptorSetLayout * pSetLayouts_            = {},
                                              uint32_t                    pushConstantRangeCount_ = {},
                                              const PushConstantRange *   pPushConstantRanges_    = {},
                                              const SpecializationInfo *  pSpecializationInfo_    = {},
                                              const void *                pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , stage{ stage_ }
      , nextStage{ nextStage_ }
      , codeType{ codeType_ }
      , codeSize{ codeSize_ }
      , pCode{ pCode_ }
      , pName{ pName_ }
      , setLayoutCount{ setLayoutCount_ }
      , pSetLayouts{ pSetLayouts_ }
      , pushConstantRangeCount{ pushConstantRangeCount_ }
      , pPushConstantRanges{ pPushConstantRanges_ }
      , pSpecializationInfo{ pSpecializationInfo_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ShaderCreateInfoEXT( ShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the equivalent C struct; wrapper and C struct are layout-compatible
    ShaderCreateInfoEXT( VkShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ShaderCreateInfoEXT( *reinterpret_cast<ShaderCreateInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // enhanced constructor: counts, sizes and pointers are derived from the array proxies; codeSize is in bytes
    template <typename T>
    ShaderCreateInfoEXT( ShaderCreateFlagsEXT                                       flags_,
                         ShaderStageFlagBits                                        stage_,
                         ShaderStageFlags                                           nextStage_,
                         ShaderCodeTypeEXT                                          codeType_,
                         ArrayProxyNoTemporaries<const T> const &                   code_,
                         const char *                                               pName_               = {},
                         ArrayProxyNoTemporaries<const DescriptorSetLayout> const & setLayouts_          = {},
                         ArrayProxyNoTemporaries<const PushConstantRange> const &   pushConstantRanges_  = {},
                         const SpecializationInfo *                                 pSpecializationInfo_ = {},
                         const void *                                               pNext_               = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , stage( stage_ )
      , nextStage( nextStage_ )
      , codeType( codeType_ )
      , codeSize( code_.size() * sizeof( T ) )
      , pCode( code_.data() )
      , pName( pName_ )
      , setLayoutCount( static_cast<uint32_t>( setLayouts_.size() ) )
      , pSetLayouts( setLayouts_.data() )
      , pushConstantRangeCount( static_cast<uint32_t>( pushConstantRanges_.size() ) )
      , pPushConstantRanges( pushConstantRanges_.data() )
      , pSpecializationInfo( pSpecializationInfo_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    ShaderCreateInfoEXT & operator=( ShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the equivalent C struct
    ShaderCreateInfoEXT & operator=( VkShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ShaderCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters, one per member
    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setFlags( ShaderCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setStage( ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT
    {
      stage = stage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setNextStage( ShaderStageFlags nextStage_ ) VULKAN_HPP_NOEXCEPT
    {
      nextStage = nextStage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setCodeType( ShaderCodeTypeEXT codeType_ ) VULKAN_HPP_NOEXCEPT
    {
      codeType = codeType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT
    {
      codeSize = codeSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPCode( const void * pCode_ ) VULKAN_HPP_NOEXCEPT
    {
      pCode = pCode_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // sets codeSize (in bytes) and pCode from an array proxy in one call
    template <typename T>
    ShaderCreateInfoEXT & setCode( ArrayProxyNoTemporaries<const T> const & code_ ) VULKAN_HPP_NOEXCEPT
    {
      codeSize = code_.size() * sizeof( T );
      pCode    = code_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT
    {
      pName = pName_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT
    {
      setLayoutCount = setLayoutCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPSetLayouts( const DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      pSetLayouts = pSetLayouts_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // sets setLayoutCount and pSetLayouts from an array proxy in one call
    ShaderCreateInfoEXT & setSetLayouts( ArrayProxyNoTemporaries<const DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      setLayoutCount = static_cast<uint32_t>( setLayouts_.size() );
      pSetLayouts    = setLayouts_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT
    {
      pushConstantRangeCount = pushConstantRangeCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPPushConstantRanges( const PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
    {
      pPushConstantRanges = pPushConstantRanges_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // sets pushConstantRangeCount and pPushConstantRanges from an array proxy in one call
    ShaderCreateInfoEXT & setPushConstantRanges( ArrayProxyNoTemporaries<const PushConstantRange> const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
    {
      pushConstantRangeCount = static_cast<uint32_t>( pushConstantRanges_.size() );
      pPushConstantRanges    = pushConstantRanges_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPSpecializationInfo( const SpecializationInfo * pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pSpecializationInfo = pSpecializationInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the C struct: reference and pointer, const and non-const
    operator VkShaderCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkShaderCreateInfoEXT *>( this );
    }

    operator VkShaderCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkShaderCreateInfoEXT *>( this );
    }

    operator VkShaderCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkShaderCreateInfoEXT *>( this );
    }

    operator VkShaderCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkShaderCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple view over all members, in declaration order
    std::tuple<StructureType const &,
               const void * const &,
               ShaderCreateFlagsEXT const &,
               ShaderStageFlagBits const &,
               ShaderStageFlags const &,
               ShaderCodeTypeEXT const &,
               size_t const &,
               const void * const &,
               const char * const &,
               uint32_t const &,
               const DescriptorSetLayout * const &,
               uint32_t const &,
               const PushConstantRange * const &,
               const SpecializationInfo * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       flags,
                       stage,
                       nextStage,
                       codeType,
                       codeSize,
                       pCode,
                       pName,
                       setLayoutCount,
                       pSetLayouts,
                       pushConstantRangeCount,
                       pPushConstantRanges,
                       pSpecializationInfo );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // member-wise ordering; pName is compared by string content, all other pointers by address
    std::strong_ordering operator<=>( ShaderCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
        return cmp;
      if ( auto cmp = stage <=> rhs.stage; cmp != 0 )
        return cmp;
      if ( auto cmp = nextStage <=> rhs.nextStage; cmp != 0 )
        return cmp;
      if ( auto cmp = codeType <=> rhs.codeType; cmp != 0 )
        return cmp;
      if ( auto cmp = codeSize <=> rhs.codeSize; cmp != 0 )
        return cmp;
      if ( auto cmp = pCode <=> rhs.pCode; cmp != 0 )
        return cmp;
      if ( pName != rhs.pName )
      {
        // null-safe string comparison: strcmp must not be called with a nullptr argument
        // (pName defaults to nullptr); a null pName orders before any non-null one
        if ( !pName || !rhs.pName )
          return pName ? std::strong_ordering::greater : std::strong_ordering::less;
        if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 )
          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      }
      if ( auto cmp = setLayoutCount <=> rhs.setLayoutCount; cmp != 0 )
        return cmp;
      if ( auto cmp = pSetLayouts <=> rhs.pSetLayouts; cmp != 0 )
        return cmp;
      if ( auto cmp = pushConstantRangeCount <=> rhs.pushConstantRangeCount; cmp != 0 )
        return cmp;
      if ( auto cmp = pPushConstantRanges <=> rhs.pPushConstantRanges; cmp != 0 )
        return cmp;
      if ( auto cmp = pSpecializationInfo <=> rhs.pSpecializationInfo; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    // member-wise equality; pName is compared by string content (null-safe), all other pointers by address
    bool operator==( ShaderCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && ( nextStage == rhs.nextStage ) &&
             ( codeType == rhs.codeType ) && ( codeSize == rhs.codeSize ) && ( pCode == rhs.pCode ) &&
             ( ( pName == rhs.pName ) || ( pName && rhs.pName && ( strcmp( pName, rhs.pName ) == 0 ) ) ) && ( setLayoutCount == rhs.setLayoutCount ) &&
             ( pSetLayouts == rhs.pSetLayouts ) && ( pushConstantRangeCount == rhs.pushConstantRangeCount ) &&
             ( pPushConstantRanges == rhs.pPushConstantRanges ) && ( pSpecializationInfo == rhs.pSpecializationInfo );
    }

    bool operator!=( ShaderCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    StructureType               sType                  = StructureType::eShaderCreateInfoEXT;
    const void *                pNext                  = {};
    ShaderCreateFlagsEXT        flags                  = {};
    ShaderStageFlagBits         stage                  = ShaderStageFlagBits::eVertex;
    ShaderStageFlags            nextStage              = {};
    ShaderCodeTypeEXT           codeType               = ShaderCodeTypeEXT::eBinary;
    size_t                      codeSize               = {};
    const void *                pCode                  = {};
    const char *                pName                  = {};
    uint32_t                    setLayoutCount         = {};
    const DescriptorSetLayout * pSetLayouts            = {};
    uint32_t                    pushConstantRangeCount = {};
    const PushConstantRange *   pPushConstantRanges    = {};
    const SpecializationInfo *  pSpecializationInfo    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the C struct VkShaderCreateInfoEXT to its C++ wrapper type
  template <>
  struct CppType<VkShaderCreateInfoEXT>
  {
    using Type = ShaderCreateInfoEXT;
  };
#endif

  // maps StructureType::eShaderCreateInfoEXT to its C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::eShaderCreateInfoEXT>
  {
    using Type = ShaderCreateInfoEXT;
  };

  // wrapper struct for struct VkShaderModuleCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderModuleCreateInfo.html
  struct ShaderModuleCreateInfo
  {
    using NativeType = VkShaderModuleCreateInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eShaderModuleCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; all members default to zero / nullptr
    VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( ShaderModuleCreateFlags flags_    = {},
                                                 size_t                  codeSize_ = {},
                                                 const uint32_t *        pCode_    = {},
                                                 const void *            pNext_    = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , codeSize{ codeSize_ }
      , pCode{ pCode_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the equivalent C struct; wrapper and C struct are layout-compatible
    ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ShaderModuleCreateInfo( *reinterpret_cast<ShaderModuleCreateInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // enhanced constructor: codeSize (in bytes) and pCode are derived from the code array proxy
    ShaderModuleCreateInfo( ShaderModuleCreateFlags flags_, ArrayProxyNoTemporaries<const uint32_t> const & code_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), flags( flags_ ), codeSize( code_.size() * sizeof( uint32_t ) ), pCode( code_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    ShaderModuleCreateInfo & operator=( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the equivalent C struct
    ShaderModuleCreateInfo & operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      return *this = *reinterpret_cast<ShaderModuleCreateInfo const *>( &rhs );
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters, one per member
    VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setFlags( ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      this->flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT
    {
      this->codeSize = codeSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setPCode( const uint32_t * pCode_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pCode = pCode_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // sets codeSize (in bytes) and pCode from an array proxy in one call
    ShaderModuleCreateInfo & setCode( ArrayProxyNoTemporaries<const uint32_t> const & code_ ) VULKAN_HPP_NOEXCEPT
    {
      codeSize = code_.size() * sizeof( uint32_t );
      pCode    = code_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the C struct: reference and pointer, const and non-const
    operator VkShaderModuleCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkShaderModuleCreateInfo const *>( this );
    }

    operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkShaderModuleCreateInfo *>( this );
    }

    operator VkShaderModuleCreateInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkShaderModuleCreateInfo const *>( this );
    }

    operator VkShaderModuleCreateInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkShaderModuleCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple view over all members, in declaration order
    std::tuple<StructureType const &, const void * const &, ShaderModuleCreateFlags const &, size_t const &, const uint32_t * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, codeSize, pCode );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ShaderModuleCreateInfo const & ) const = default;
#else
    bool operator==( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      // pCode is compared by address, not by the SPIR-V words it points to
      return sType == rhs.sType && pNext == rhs.pNext && flags == rhs.flags && codeSize == rhs.codeSize && pCode == rhs.pCode;
#  endif
    }

    bool operator!=( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType           sType    = StructureType::eShaderModuleCreateInfo;
    const void *            pNext    = {};
    ShaderModuleCreateFlags flags    = {};
    size_t                  codeSize = {};
    const uint32_t *        pCode    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the C struct VkShaderModuleCreateInfo to its C++ wrapper type
  template <>
  struct CppType<VkShaderModuleCreateInfo>
  {
    using Type = ShaderModuleCreateInfo;
  };
#endif

  // maps StructureType::eShaderModuleCreateInfo to its C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::eShaderModuleCreateInfo>
  {
    using Type = ShaderModuleCreateInfo;
  };

  // wrapper struct for struct VkShaderModuleIdentifierEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderModuleIdentifierEXT.html
  struct ShaderModuleIdentifierEXT
  {
    using NativeType = VkShaderModuleIdentifierEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eShaderModuleIdentifierEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; CONSTEXPR_14 presumably because copying the std::array member needs C++14 constexpr
    VULKAN_HPP_CONSTEXPR_14 ShaderModuleIdentifierEXT( uint32_t                                                              identifierSize_ = {},
                                                       std::array<uint8_t, VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT> const & identifier_     = {},
                                                       void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , identifierSize{ identifierSize_ }
      , identifier{ identifier_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 ShaderModuleIdentifierEXT( ShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the equivalent C struct; wrapper and C struct are layout-compatible
    ShaderModuleIdentifierEXT( VkShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ShaderModuleIdentifierEXT( *reinterpret_cast<ShaderModuleIdentifierEXT const *>( &rhs ) )
    {
    }

    ShaderModuleIdentifierEXT & operator=( ShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the equivalent C struct
    ShaderModuleIdentifierEXT & operator=( VkShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ShaderModuleIdentifierEXT const *>( &rhs );
      return *this;
    }

    // no setters are generated here — presumably a pure output structure; confirm against the generator config

    // implicit conversions to the C struct: reference and pointer, const and non-const
    operator VkShaderModuleIdentifierEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkShaderModuleIdentifierEXT *>( this );
    }

    operator VkShaderModuleIdentifierEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkShaderModuleIdentifierEXT *>( this );
    }

    operator VkShaderModuleIdentifierEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkShaderModuleIdentifierEXT *>( this );
    }

    operator VkShaderModuleIdentifierEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkShaderModuleIdentifierEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple view over all members, in declaration order
    std::tuple<StructureType const &, void * const &, uint32_t const &, ArrayWrapper1D<uint8_t, VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT> const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, identifierSize, identifier );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // member-wise ordering; only the first identifierSize bytes of identifier take part in the comparison.
    // NOTE(review): assumes identifierSize <= VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT — an out-of-spec
    // value would index past the array; confirm this invariant is guaranteed by the producer
    std::strong_ordering operator<=>( ShaderModuleIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = identifierSize <=> rhs.identifierSize; cmp != 0 )
        return cmp;
      for ( size_t i = 0; i < identifierSize; ++i )
      {
        if ( auto cmp = identifier[i] <=> rhs.identifier[i]; cmp != 0 )
          return cmp;
      }

      return std::strong_ordering::equivalent;
    }
#endif

    // member-wise equality; identifier is compared only over its first identifierSize bytes
    // (NOTE(review): same identifierSize bound assumption as in operator<=> above)
    bool operator==( ShaderModuleIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( identifierSize == rhs.identifierSize ) &&
             ( memcmp( identifier, rhs.identifier, identifierSize * sizeof( uint8_t ) ) == 0 );
    }

    bool operator!=( ShaderModuleIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    StructureType                                                     sType          = StructureType::eShaderModuleIdentifierEXT;
    void *                                                            pNext          = {};
    uint32_t                                                          identifierSize = {};
    ArrayWrapper1D<uint8_t, VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT> identifier     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the C struct VkShaderModuleIdentifierEXT to its C++ wrapper type
  template <>
  struct CppType<VkShaderModuleIdentifierEXT>
  {
    using Type = ShaderModuleIdentifierEXT;
  };
#endif

  // maps StructureType::eShaderModuleIdentifierEXT to its C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::eShaderModuleIdentifierEXT>
  {
    using Type = ShaderModuleIdentifierEXT;
  };

  // wrapper struct for struct VkShaderModuleValidationCacheCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderModuleValidationCacheCreateInfoEXT.html
  struct ShaderModuleValidationCacheCreateInfoEXT
  {
    using NativeType = VkShaderModuleValidationCacheCreateInfoEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eShaderModuleValidationCacheCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; members default to null handles / nullptr
    VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( ValidationCacheEXT validationCache_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , validationCache{ validationCache_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the equivalent C struct; wrapper and C struct are layout-compatible
    ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ShaderModuleValidationCacheCreateInfoEXT( *reinterpret_cast<ShaderModuleValidationCacheCreateInfoEXT const *>( &rhs ) )
    {
    }

    ShaderModuleValidationCacheCreateInfoEXT & operator=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the equivalent C struct
    ShaderModuleValidationCacheCreateInfoEXT & operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      return *this = *reinterpret_cast<ShaderModuleValidationCacheCreateInfoEXT const *>( &rhs );
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // chainable setters, one per member
    VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT & setValidationCache( ValidationCacheEXT validationCache_ ) VULKAN_HPP_NOEXCEPT
    {
      this->validationCache = validationCache_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the C struct: reference and pointer, const and non-const
    operator VkShaderModuleValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT const *>( this );
    }

    operator VkShaderModuleValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT *>( this );
    }

    operator VkShaderModuleValidationCacheCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT const *>( this );
    }

    operator VkShaderModuleValidationCacheCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // tuple view over all members, in declaration order
    std::tuple<StructureType const &, const void * const &, ValidationCacheEXT const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, validationCache );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ShaderModuleValidationCacheCreateInfoEXT const & ) const = default;
#else
    bool operator==( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return sType == rhs.sType && pNext == rhs.pNext && validationCache == rhs.validationCache;
#  endif
    }

    bool operator!=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType      sType           = StructureType::eShaderModuleValidationCacheCreateInfoEXT;
    const void *       pNext           = {};
    ValidationCacheEXT validationCache = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the C struct VkShaderModuleValidationCacheCreateInfoEXT to its C++ wrapper type
  template <>
  struct CppType<VkShaderModuleValidationCacheCreateInfoEXT>
  {
    using Type = ShaderModuleValidationCacheCreateInfoEXT;
  };
#endif

  // maps StructureType::eShaderModuleValidationCacheCreateInfoEXT to its C++ wrapper type
  template <>
  struct CppType<StructureType, StructureType::eShaderModuleValidationCacheCreateInfoEXT>
  {
    using Type = ShaderModuleValidationCacheCreateInfoEXT;
  };

  // wrapper struct for struct VkShaderResourceUsageAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderResourceUsageAMD.html
  // Reports per-shader GPU resource usage (VGPRs, SGPRs, LDS, scratch memory). No setters are
  // generated, suggesting it is only ever filled in by the implementation. The wrapper must stay
  // layout-compatible with the native C struct: all conversions below rely on reinterpret_cast.
  struct ShaderResourceUsageAMD
  {
    using NativeType = VkShaderResourceUsageAMD;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; every member defaults to zero-initialization
    VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( uint32_t numUsedVgprs_             = {},
                                                 uint32_t numUsedSgprs_             = {},
                                                 uint32_t ldsSizePerLocalWorkGroup_ = {},
                                                 size_t   ldsUsageSizeInBytes_      = {},
                                                 size_t   scratchMemUsageInBytes_   = {} ) VULKAN_HPP_NOEXCEPT
      : numUsedVgprs{ numUsedVgprs_ }
      , numUsedSgprs{ numUsedSgprs_ }
      , ldsSizePerLocalWorkGroup{ ldsSizePerLocalWorkGroup_ }
      , ldsUsageSizeInBytes{ ldsUsageSizeInBytes_ }
      , scratchMemUsageInBytes{ scratchMemUsageInBytes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct (valid because of the layout compatibility noted above)
    ShaderResourceUsageAMD( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : ShaderResourceUsageAMD( *reinterpret_cast<ShaderResourceUsageAMD const *>( &rhs ) )
    {
    }

    ShaderResourceUsageAMD & operator=( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct
    ShaderResourceUsageAMD & operator=( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ShaderResourceUsageAMD const *>( &rhs );
      return *this;
    }

    // implicit conversions to the native C type (const/non-const reference and pointer),
    // so the wrapper can be passed directly to the C API
    operator VkShaderResourceUsageAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkShaderResourceUsageAMD *>( this );
    }

    operator VkShaderResourceUsageAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkShaderResourceUsageAMD *>( this );
    }

    operator VkShaderResourceUsageAMD const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkShaderResourceUsageAMD *>( this );
    }

    operator VkShaderResourceUsageAMD *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkShaderResourceUsageAMD *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // exposes all members as a tuple of references (opt-in reflection support)
    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, size_t const &, size_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( numUsedVgprs, numUsedSgprs, ldsSizePerLocalWorkGroup, ldsUsageSizeInBytes, scratchMemUsageInBytes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // defaulted three-way comparison when <=> is available ...
    auto operator<=>( ShaderResourceUsageAMD const & ) const = default;
#else
    // ... otherwise member-wise equality/inequality only
    bool operator==( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( numUsedVgprs == rhs.numUsedVgprs ) && ( numUsedSgprs == rhs.numUsedSgprs ) && ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup ) &&
             ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes ) && ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes );
#  endif
    }

    bool operator!=( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t numUsedVgprs             = {};
    uint32_t numUsedSgprs             = {};
    uint32_t ldsSizePerLocalWorkGroup = {};
    size_t   ldsUsageSizeInBytes      = {};
    size_t   scratchMemUsageInBytes   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type to its C++ wrapper (only available with C++20 or newer)
  template <>
  struct CppType<VkShaderResourceUsageAMD>
  {
    using Type = ShaderResourceUsageAMD;
  };
#endif

  // wrapper struct for struct VkShaderStatisticsInfoAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderStatisticsInfoAMD.html
  // Aggregates per-stage shader statistics (resource usage plus physical/available register
  // counts and the compute workgroup size). No setters are generated, suggesting it is only
  // ever filled in by the implementation. The wrapper must stay layout-compatible with the
  // native C struct: all conversions below rely on reinterpret_cast.
  struct ShaderStatisticsInfoAMD
  {
    using NativeType = VkShaderStatisticsInfoAMD;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; CONSTEXPR_14 (not plain CONSTEXPR), matching the other
    // array-holding wrappers in this header
    VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD( ShaderStageFlags                shaderStageMask_      = {},
                                                     ShaderResourceUsageAMD          resourceUsage_        = {},
                                                     uint32_t                        numPhysicalVgprs_     = {},
                                                     uint32_t                        numPhysicalSgprs_     = {},
                                                     uint32_t                        numAvailableVgprs_    = {},
                                                     uint32_t                        numAvailableSgprs_    = {},
                                                     std::array<uint32_t, 3> const & computeWorkGroupSize_ = {} ) VULKAN_HPP_NOEXCEPT
      : shaderStageMask{ shaderStageMask_ }
      , resourceUsage{ resourceUsage_ }
      , numPhysicalVgprs{ numPhysicalVgprs_ }
      , numPhysicalSgprs{ numPhysicalSgprs_ }
      , numAvailableVgprs{ numAvailableVgprs_ }
      , numAvailableSgprs{ numAvailableSgprs_ }
      , computeWorkGroupSize{ computeWorkGroupSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct (valid because of the layout compatibility noted above)
    ShaderStatisticsInfoAMD( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : ShaderStatisticsInfoAMD( *reinterpret_cast<ShaderStatisticsInfoAMD const *>( &rhs ) )
    {
    }

    ShaderStatisticsInfoAMD & operator=( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct
    ShaderStatisticsInfoAMD & operator=( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ShaderStatisticsInfoAMD const *>( &rhs );
      return *this;
    }

    // implicit conversions to the native C type (const/non-const reference and pointer),
    // so the wrapper can be passed directly to the C API
    operator VkShaderStatisticsInfoAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkShaderStatisticsInfoAMD *>( this );
    }

    operator VkShaderStatisticsInfoAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkShaderStatisticsInfoAMD *>( this );
    }

    operator VkShaderStatisticsInfoAMD const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkShaderStatisticsInfoAMD *>( this );
    }

    operator VkShaderStatisticsInfoAMD *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkShaderStatisticsInfoAMD *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // exposes all members as a tuple of references (opt-in reflection support)
    std::tuple<ShaderStageFlags const &,
               ShaderResourceUsageAMD const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               ArrayWrapper1D<uint32_t, 3> const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( shaderStageMask, resourceUsage, numPhysicalVgprs, numPhysicalSgprs, numAvailableVgprs, numAvailableSgprs, computeWorkGroupSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // defaulted three-way comparison when <=> is available ...
    auto operator<=>( ShaderStatisticsInfoAMD const & ) const = default;
#else
    // ... otherwise member-wise equality/inequality only
    bool operator==( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( shaderStageMask == rhs.shaderStageMask ) && ( resourceUsage == rhs.resourceUsage ) && ( numPhysicalVgprs == rhs.numPhysicalVgprs ) &&
             ( numPhysicalSgprs == rhs.numPhysicalSgprs ) && ( numAvailableVgprs == rhs.numAvailableVgprs ) && ( numAvailableSgprs == rhs.numAvailableSgprs ) &&
             ( computeWorkGroupSize == rhs.computeWorkGroupSize );
#  endif
    }

    bool operator!=( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    ShaderStageFlags            shaderStageMask      = {};
    ShaderResourceUsageAMD      resourceUsage        = {};
    uint32_t                    numPhysicalVgprs     = {};
    uint32_t                    numPhysicalSgprs     = {};
    uint32_t                    numAvailableVgprs    = {};
    uint32_t                    numAvailableSgprs    = {};
    ArrayWrapper1D<uint32_t, 3> computeWorkGroupSize = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type to its C++ wrapper (only available with C++20 or newer)
  template <>
  struct CppType<VkShaderStatisticsInfoAMD>
  {
    using Type = ShaderStatisticsInfoAMD;
  };
#endif

  // wrapper struct for struct VkSharedPresentSurfaceCapabilitiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSharedPresentSurfaceCapabilitiesKHR.html
  // Chainable structure (has sType/pNext); no setters are generated, suggesting it is only
  // ever filled in by the implementation. The wrapper must stay layout-compatible with the
  // native C struct: all conversions below rely on reinterpret_cast.
  struct SharedPresentSurfaceCapabilitiesKHR
  {
    using NativeType = VkSharedPresentSurfaceCapabilitiesKHR;

    // at most one instance of this structure may appear in a pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSharedPresentSurfaceCapabilitiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed by the member initializer below
    VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( ImageUsageFlags sharedPresentSupportedUsageFlags_ = {},
                                                              void *          pNext_                            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , sharedPresentSupportedUsageFlags{ sharedPresentSupportedUsageFlags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct (valid because of the layout compatibility noted above)
    SharedPresentSurfaceCapabilitiesKHR( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SharedPresentSurfaceCapabilitiesKHR( *reinterpret_cast<SharedPresentSurfaceCapabilitiesKHR const *>( &rhs ) )
    {
    }

    SharedPresentSurfaceCapabilitiesKHR & operator=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct
    SharedPresentSurfaceCapabilitiesKHR & operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SharedPresentSurfaceCapabilitiesKHR const *>( &rhs );
      return *this;
    }

    // implicit conversions to the native C type (const/non-const reference and pointer),
    // so the wrapper can be passed directly to the C API
    operator VkSharedPresentSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSharedPresentSurfaceCapabilitiesKHR *>( this );
    }

    operator VkSharedPresentSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSharedPresentSurfaceCapabilitiesKHR *>( this );
    }

    operator VkSharedPresentSurfaceCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSharedPresentSurfaceCapabilitiesKHR *>( this );
    }

    operator VkSharedPresentSurfaceCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSharedPresentSurfaceCapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // exposes all members as a tuple of references (opt-in reflection support)
    std::tuple<StructureType const &, void * const &, ImageUsageFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, sharedPresentSupportedUsageFlags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // defaulted three-way comparison when <=> is available ...
    auto operator<=>( SharedPresentSurfaceCapabilitiesKHR const & ) const = default;
#else
    // ... otherwise member-wise equality/inequality only (pNext is compared as a raw pointer)
    bool operator==( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags );
#  endif
    }

    bool operator!=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType   sType                            = StructureType::eSharedPresentSurfaceCapabilitiesKHR;
    void *          pNext                            = {};
    ImageUsageFlags sharedPresentSupportedUsageFlags = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type to its C++ wrapper (only available with C++20 or newer)
  template <>
  struct CppType<VkSharedPresentSurfaceCapabilitiesKHR>
  {
    using Type = SharedPresentSurfaceCapabilitiesKHR;
  };
#endif

  // maps the StructureType enumerator to the C++ wrapper type carrying that sType
  template <>
  struct CppType<StructureType, StructureType::eSharedPresentSurfaceCapabilitiesKHR>
  {
    using Type = SharedPresentSurfaceCapabilitiesKHR;
  };

  // wrapper struct for struct VkSparseImageFormatProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageFormatProperties.html
  // Non-chainable structure (no sType/pNext); no setters are generated, suggesting it is only
  // ever filled in by the implementation. The wrapper must stay layout-compatible with the
  // native C struct: all conversions below rely on reinterpret_cast.
  struct SparseImageFormatProperties
  {
    using NativeType = VkSparseImageFormatProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; every member defaults to zero-initialization
    VULKAN_HPP_CONSTEXPR
      SparseImageFormatProperties( ImageAspectFlags aspectMask_ = {}, Extent3D imageGranularity_ = {}, SparseImageFormatFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
      : aspectMask{ aspectMask_ }
      , imageGranularity{ imageGranularity_ }
      , flags{ flags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SparseImageFormatProperties( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct (valid because of the layout compatibility noted above)
    SparseImageFormatProperties( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : SparseImageFormatProperties( *reinterpret_cast<SparseImageFormatProperties const *>( &rhs ) )
    {
    }

    SparseImageFormatProperties & operator=( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct
    SparseImageFormatProperties & operator=( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SparseImageFormatProperties const *>( &rhs );
      return *this;
    }

    // implicit conversions to the native C type (const/non-const reference and pointer),
    // so the wrapper can be passed directly to the C API
    operator VkSparseImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSparseImageFormatProperties *>( this );
    }

    operator VkSparseImageFormatProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSparseImageFormatProperties *>( this );
    }

    operator VkSparseImageFormatProperties const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSparseImageFormatProperties *>( this );
    }

    operator VkSparseImageFormatProperties *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSparseImageFormatProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // exposes all members as a tuple of references (opt-in reflection support)
    std::tuple<ImageAspectFlags const &, Extent3D const &, SparseImageFormatFlags const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( aspectMask, imageGranularity, flags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // defaulted three-way comparison when <=> is available ...
    auto operator<=>( SparseImageFormatProperties const & ) const = default;
#else
    // ... otherwise member-wise equality/inequality only
    bool operator==( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( aspectMask == rhs.aspectMask ) && ( imageGranularity == rhs.imageGranularity ) && ( flags == rhs.flags );
#  endif
    }

    bool operator!=( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    ImageAspectFlags       aspectMask       = {};
    Extent3D               imageGranularity = {};
    SparseImageFormatFlags flags            = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type to its C++ wrapper (only available with C++20 or newer)
  template <>
  struct CppType<VkSparseImageFormatProperties>
  {
    using Type = SparseImageFormatProperties;
  };
#endif

  // wrapper struct for struct VkSparseImageFormatProperties2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageFormatProperties2.html
  // Extensible (sType/pNext) variant of SparseImageFormatProperties; no setters are generated,
  // suggesting it is only ever filled in by the implementation. The wrapper must stay
  // layout-compatible with the native C struct: all conversions below rely on reinterpret_cast.
  struct SparseImageFormatProperties2
  {
    using NativeType = VkSparseImageFormatProperties2;

    // at most one instance of this structure may appear in a pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSparseImageFormatProperties2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed by the member initializer below
    VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( SparseImageFormatProperties properties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , properties{ properties_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct (valid because of the layout compatibility noted above)
    SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : SparseImageFormatProperties2( *reinterpret_cast<SparseImageFormatProperties2 const *>( &rhs ) )
    {
    }

    SparseImageFormatProperties2 & operator=( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct
    SparseImageFormatProperties2 & operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SparseImageFormatProperties2 const *>( &rhs );
      return *this;
    }

    // implicit conversions to the native C type (const/non-const reference and pointer),
    // so the wrapper can be passed directly to the C API
    operator VkSparseImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSparseImageFormatProperties2 *>( this );
    }

    operator VkSparseImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSparseImageFormatProperties2 *>( this );
    }

    operator VkSparseImageFormatProperties2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSparseImageFormatProperties2 *>( this );
    }

    operator VkSparseImageFormatProperties2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSparseImageFormatProperties2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // exposes all members as a tuple of references (opt-in reflection support)
    std::tuple<StructureType const &, void * const &, SparseImageFormatProperties const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, properties );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // defaulted three-way comparison when <=> is available ...
    auto operator<=>( SparseImageFormatProperties2 const & ) const = default;
#else
    // ... otherwise member-wise equality/inequality only (pNext is compared as a raw pointer)
    bool operator==( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties );
#  endif
    }

    bool operator!=( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType               sType      = StructureType::eSparseImageFormatProperties2;
    void *                      pNext      = {};
    SparseImageFormatProperties properties = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type to its C++ wrapper (only available with C++20 or newer)
  template <>
  struct CppType<VkSparseImageFormatProperties2>
  {
    using Type = SparseImageFormatProperties2;
  };
#endif

  // maps the StructureType enumerator to the C++ wrapper type carrying that sType
  template <>
  struct CppType<StructureType, StructureType::eSparseImageFormatProperties2>
  {
    using Type = SparseImageFormatProperties2;
  };

  // alias for the name used by the originating KHR extension before promotion to core
  using SparseImageFormatProperties2KHR = SparseImageFormatProperties2;

  // wrapper struct for struct VkSparseImageMemoryRequirements, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageMemoryRequirements.html
  // Non-chainable structure (no sType/pNext); no setters are generated, suggesting it is only
  // ever filled in by the implementation. The wrapper must stay layout-compatible with the
  // native C struct: all conversions below rely on reinterpret_cast.
  struct SparseImageMemoryRequirements
  {
    using NativeType = VkSparseImageMemoryRequirements;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; every member defaults to zero-initialization
    VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements( SparseImageFormatProperties formatProperties_     = {},
                                                        uint32_t                    imageMipTailFirstLod_ = {},
                                                        DeviceSize                  imageMipTailSize_     = {},
                                                        DeviceSize                  imageMipTailOffset_   = {},
                                                        DeviceSize                  imageMipTailStride_   = {} ) VULKAN_HPP_NOEXCEPT
      : formatProperties{ formatProperties_ }
      , imageMipTailFirstLod{ imageMipTailFirstLod_ }
      , imageMipTailSize{ imageMipTailSize_ }
      , imageMipTailOffset{ imageMipTailOffset_ }
      , imageMipTailStride{ imageMipTailStride_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct (valid because of the layout compatibility noted above)
    SparseImageMemoryRequirements( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
      : SparseImageMemoryRequirements( *reinterpret_cast<SparseImageMemoryRequirements const *>( &rhs ) )
    {
    }

    SparseImageMemoryRequirements & operator=( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct
    SparseImageMemoryRequirements & operator=( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SparseImageMemoryRequirements const *>( &rhs );
      return *this;
    }

    // implicit conversions to the native C type (const/non-const reference and pointer),
    // so the wrapper can be passed directly to the C API
    operator VkSparseImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSparseImageMemoryRequirements *>( this );
    }

    operator VkSparseImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSparseImageMemoryRequirements *>( this );
    }

    operator VkSparseImageMemoryRequirements const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSparseImageMemoryRequirements *>( this );
    }

    operator VkSparseImageMemoryRequirements *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSparseImageMemoryRequirements *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // exposes all members as a tuple of references (opt-in reflection support)
    std::tuple<SparseImageFormatProperties const &, uint32_t const &, DeviceSize const &, DeviceSize const &, DeviceSize const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( formatProperties, imageMipTailFirstLod, imageMipTailSize, imageMipTailOffset, imageMipTailStride );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // defaulted three-way comparison when <=> is available ...
    auto operator<=>( SparseImageMemoryRequirements const & ) const = default;
#else
    // ... otherwise member-wise equality/inequality only
    bool operator==( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( formatProperties == rhs.formatProperties ) && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod ) &&
             ( imageMipTailSize == rhs.imageMipTailSize ) && ( imageMipTailOffset == rhs.imageMipTailOffset ) &&
             ( imageMipTailStride == rhs.imageMipTailStride );
#  endif
    }

    bool operator!=( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    SparseImageFormatProperties formatProperties     = {};
    uint32_t                    imageMipTailFirstLod = {};
    DeviceSize                  imageMipTailSize     = {};
    DeviceSize                  imageMipTailOffset   = {};
    DeviceSize                  imageMipTailStride   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type to its C++ wrapper (only available with C++20 or newer)
  template <>
  struct CppType<VkSparseImageMemoryRequirements>
  {
    using Type = SparseImageMemoryRequirements;
  };
#endif

  // wrapper struct for struct VkSparseImageMemoryRequirements2, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageMemoryRequirements2.html
  // Extensible (sType/pNext) variant of SparseImageMemoryRequirements; no setters are generated,
  // suggesting it is only ever filled in by the implementation. The wrapper must stay
  // layout-compatible with the native C struct: all conversions below rely on reinterpret_cast.
  struct SparseImageMemoryRequirements2
  {
    using NativeType = VkSparseImageMemoryRequirements2;

    // at most one instance of this structure may appear in a pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSparseImageMemoryRequirements2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed by the member initializer below
    VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( SparseImageMemoryRequirements memoryRequirements_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , memoryRequirements{ memoryRequirements_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct (valid because of the layout compatibility noted above)
    SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : SparseImageMemoryRequirements2( *reinterpret_cast<SparseImageMemoryRequirements2 const *>( &rhs ) )
    {
    }

    SparseImageMemoryRequirements2 & operator=( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct
    SparseImageMemoryRequirements2 & operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SparseImageMemoryRequirements2 const *>( &rhs );
      return *this;
    }

    // implicit conversions to the native C type (const/non-const reference and pointer),
    // so the wrapper can be passed directly to the C API
    operator VkSparseImageMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSparseImageMemoryRequirements2 *>( this );
    }

    operator VkSparseImageMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSparseImageMemoryRequirements2 *>( this );
    }

    operator VkSparseImageMemoryRequirements2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSparseImageMemoryRequirements2 *>( this );
    }

    operator VkSparseImageMemoryRequirements2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSparseImageMemoryRequirements2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // exposes all members as a tuple of references (opt-in reflection support)
    std::tuple<StructureType const &, void * const &, SparseImageMemoryRequirements const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memoryRequirements );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // defaulted three-way comparison when <=> is available ...
    auto operator<=>( SparseImageMemoryRequirements2 const & ) const = default;
#else
    // ... otherwise member-wise equality/inequality only (pNext is compared as a raw pointer)
    bool operator==( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements );
#  endif
    }

    bool operator!=( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                 sType              = StructureType::eSparseImageMemoryRequirements2;
    void *                        pNext              = {};
    SparseImageMemoryRequirements memoryRequirements = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type to its C++ wrapper (only available with C++20 or newer)
  template <>
  struct CppType<VkSparseImageMemoryRequirements2>
  {
    using Type = SparseImageMemoryRequirements2;
  };
#endif

  // maps the StructureType enumerator to the C++ wrapper type carrying that sType
  template <>
  struct CppType<StructureType, StructureType::eSparseImageMemoryRequirements2>
  {
    using Type = SparseImageMemoryRequirements2;
  };

  // alias for the name used by the originating KHR extension before promotion to core
  using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2;

#if defined( VK_USE_PLATFORM_GGP )
  // wrapper struct for struct VkStreamDescriptorSurfaceCreateInfoGGP, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkStreamDescriptorSurfaceCreateInfoGGP.html
  // Chainable input structure (sType/pNext plus setters) for surface creation on the GGP
  // platform. The wrapper must stay layout-compatible with the native C struct: all
  // conversions below rely on reinterpret_cast.
  struct StreamDescriptorSurfaceCreateInfoGGP
  {
    using NativeType = VkStreamDescriptorSurfaceCreateInfoGGP;

    // at most one instance of this structure may appear in a pNext chain
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eStreamDescriptorSurfaceCreateInfoGGP;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // value constructor; sType is fixed by the member initializer below
    VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( StreamDescriptorSurfaceCreateFlagsGGP flags_            = {},
                                                               GgpStreamDescriptor                   streamDescriptor_ = {},
                                                               const void *                          pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , streamDescriptor{ streamDescriptor_ }
    {
    }

    VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // construct from the native C struct (valid because of the layout compatibility noted above)
    StreamDescriptorSurfaceCreateInfoGGP( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
      : StreamDescriptorSurfaceCreateInfoGGP( *reinterpret_cast<StreamDescriptorSurfaceCreateInfoGGP const *>( &rhs ) )
    {
    }

    StreamDescriptorSurfaceCreateInfoGGP & operator=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // assign from the native C struct
    StreamDescriptorSurfaceCreateInfoGGP & operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<StreamDescriptorSurfaceCreateInfoGGP const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // fluent setters; each returns *this to allow chaining
    VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & setFlags( StreamDescriptorSurfaceCreateFlagsGGP flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & setStreamDescriptor( GgpStreamDescriptor streamDescriptor_ ) VULKAN_HPP_NOEXCEPT
    {
      streamDescriptor = streamDescriptor_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // implicit conversions to the native C type (const/non-const reference and pointer),
    // so the wrapper can be passed directly to the C API
    operator VkStreamDescriptorSurfaceCreateInfoGGP const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( this );
    }

    operator VkStreamDescriptorSurfaceCreateInfoGGP &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkStreamDescriptorSurfaceCreateInfoGGP *>( this );
    }

    operator VkStreamDescriptorSurfaceCreateInfoGGP const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( this );
    }

    operator VkStreamDescriptorSurfaceCreateInfoGGP *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkStreamDescriptorSurfaceCreateInfoGGP *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // exposes all members as a tuple of references (opt-in reflection support)
    std::tuple<StructureType const &, const void * const &, StreamDescriptorSurfaceCreateFlagsGGP const &, GgpStreamDescriptor const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, streamDescriptor );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // hand-rolled three-way comparison: streamDescriptor is a platform type with no
    // comparison operators of its own, so it is ordered bytewise via memcmp
    std::strong_ordering operator<=>( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
        return cmp;
      if ( auto cmp = memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#  endif

    // equality also compares streamDescriptor bytewise (see operator<=> note above)
    bool operator==( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ) == 0 );
    }

    bool operator!=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    StructureType                         sType            = StructureType::eStreamDescriptorSurfaceCreateInfoGGP;
    const void *                          pNext            = {};
    StreamDescriptorSurfaceCreateFlagsGGP flags            = {};
    GgpStreamDescriptor                   streamDescriptor = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // maps the native C type to its C++ wrapper (only available with C++20 or newer)
  template <>
  struct CppType<VkStreamDescriptorSurfaceCreateInfoGGP>
  {
    using Type = StreamDescriptorSurfaceCreateInfoGGP;
  };
#  endif

  // maps the StructureType enumerator to the C++ wrapper type carrying that sType
  template <>
  struct CppType<StructureType, StructureType::eStreamDescriptorSurfaceCreateInfoGGP>
  {
    using Type = StreamDescriptorSurfaceCreateInfoGGP;
  };
#endif /*VK_USE_PLATFORM_GGP*/

  // wrapper struct for struct VkSubmitInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubmitInfo.html
  struct SubmitInfo
  {
    using NativeType = VkSubmitInfo;

    // Compile-time identification used by structure-chain machinery; sType member below must stay eSubmitInfo.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSubmitInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; counts and pointers default to zero/null, pNext to nullptr.
    VULKAN_HPP_CONSTEXPR SubmitInfo( uint32_t                   waitSemaphoreCount_   = {},
                                     const Semaphore *          pWaitSemaphores_      = {},
                                     const PipelineStageFlags * pWaitDstStageMask_    = {},
                                     uint32_t                   commandBufferCount_   = {},
                                     const CommandBuffer *      pCommandBuffers_      = {},
                                     uint32_t                   signalSemaphoreCount_ = {},
                                     const Semaphore *          pSignalSemaphores_    = {},
                                     const void *               pNext_                = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , waitSemaphoreCount{ waitSemaphoreCount_ }
      , pWaitSemaphores{ pWaitSemaphores_ }
      , pWaitDstStageMask{ pWaitDstStageMask_ }
      , commandBufferCount{ commandBufferCount_ }
      , pCommandBuffers{ pCommandBuffers_ }
      , signalSemaphoreCount{ signalSemaphoreCount_ }
      , pSignalSemaphores{ pSignalSemaphores_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SubmitInfo( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the layout-compatible wrapper.
    SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SubmitInfo( *reinterpret_cast<SubmitInfo const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives all count members from the sizes of the given array proxies.
    SubmitInfo( ArrayProxyNoTemporaries<const Semaphore> const &          waitSemaphores_,
                ArrayProxyNoTemporaries<const PipelineStageFlags> const & waitDstStageMask_ = {},
                ArrayProxyNoTemporaries<const CommandBuffer> const &      commandBuffers_   = {},
                ArrayProxyNoTemporaries<const Semaphore> const &          signalSemaphores_ = {},
                const void *                                              pNext_            = nullptr )
      : pNext( pNext_ )
      , waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) )
      , pWaitSemaphores( waitSemaphores_.data() )
      , pWaitDstStageMask( waitDstStageMask_.data() )
      , commandBufferCount( static_cast<uint32_t>( commandBuffers_.size() ) )
      , pCommandBuffers( commandBuffers_.data() )
      , signalSemaphoreCount( static_cast<uint32_t>( signalSemaphores_.size() ) )
      , pSignalSemaphores( signalSemaphores_.data() )
    {
      // pWaitSemaphores and pWaitDstStageMask share waitSemaphoreCount in VkSubmitInfo,
      // so both proxies must have the same number of elements (assert or throw).
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( waitSemaphores_.size() == waitDstStageMask_.size() );
#    else
      if ( waitSemaphores_.size() != waitDstStageMask_.size() )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SubmitInfo::SubmitInfo: waitSemaphores_.size() != waitDstStageMask_.size()" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SubmitInfo & operator=( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the layout-compatible wrapper view.
    SubmitInfo & operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SubmitInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreCount = waitSemaphoreCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPWaitSemaphores( const Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
    {
      pWaitSemaphores = pWaitSemaphores_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both pointer and count from a single array proxy.
    SubmitInfo & setWaitSemaphores( ArrayProxyNoTemporaries<const Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
      pWaitSemaphores    = waitSemaphores_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPWaitDstStageMask( const PipelineStageFlags * pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      pWaitDstStageMask = pWaitDstStageMask_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // NOTE: also updates waitSemaphoreCount — the wait-stage-mask array shares that count
    // with pWaitSemaphores in VkSubmitInfo, so there is no separate count member to set.
    SubmitInfo & setWaitDstStageMask( ArrayProxyNoTemporaries<const PipelineStageFlags> const & waitDstStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreCount = static_cast<uint32_t>( waitDstStageMask_.size() );
      pWaitDstStageMask  = waitDstStageMask_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
    {
      commandBufferCount = commandBufferCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPCommandBuffers( const CommandBuffer * pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT
    {
      pCommandBuffers = pCommandBuffers_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both pointer and count from a single array proxy.
    SubmitInfo & setCommandBuffers( ArrayProxyNoTemporaries<const CommandBuffer> const & commandBuffers_ ) VULKAN_HPP_NOEXCEPT
    {
      commandBufferCount = static_cast<uint32_t>( commandBuffers_.size() );
      pCommandBuffers    = commandBuffers_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
    {
      signalSemaphoreCount = signalSemaphoreCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPSignalSemaphores( const Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
    {
      pSignalSemaphores = pSignalSemaphores_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both pointer and count from a single array proxy.
    SubmitInfo & setSignalSemaphores( ArrayProxyNoTemporaries<const Semaphore> const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT
    {
      signalSemaphoreCount = static_cast<uint32_t>( signalSemaphores_.size() );
      pSignalSemaphores    = signalSemaphores_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to the C API type; rely on identical memory layout with VkSubmitInfo.
    operator VkSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSubmitInfo *>( this );
    }

    operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubmitInfo *>( this );
    }

    operator VkSubmitInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSubmitInfo *>( this );
    }

    operator VkSubmitInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSubmitInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over every member, used for reflection-based comparison below.
    std::tuple<StructureType const &,
               const void * const &,
               uint32_t const &,
               const Semaphore * const &,
               const PipelineStageFlags * const &,
               uint32_t const &,
               const CommandBuffer * const &,
               uint32_t const &,
               const Semaphore * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(
        sType, pNext, waitSemaphoreCount, pWaitSemaphores, pWaitDstStageMask, commandBufferCount, pCommandBuffers, signalSemaphoreCount, pSignalSemaphores );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubmitInfo const & ) const = default;
#else
    // Member-wise equality; pointer members are compared by address, not by pointed-to contents.
    bool operator==( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) &&
             ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( pWaitDstStageMask == rhs.pWaitDstStageMask ) && ( commandBufferCount == rhs.commandBufferCount ) &&
             ( pCommandBuffers == rhs.pCommandBuffers ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) &&
             ( pSignalSemaphores == rhs.pSignalSemaphores );
#  endif
    }

    bool operator!=( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkSubmitInfo one-for-one; order and types must not change (reinterpret_cast relies on it).
    StructureType              sType                = StructureType::eSubmitInfo;
    const void *               pNext                = {};
    uint32_t                   waitSemaphoreCount   = {};
    const Semaphore *          pWaitSemaphores      = {};
    const PipelineStageFlags * pWaitDstStageMask    = {};
    uint32_t                   commandBufferCount   = {};
    const CommandBuffer *      pCommandBuffers      = {};
    uint32_t                   signalSemaphoreCount = {};
    const Semaphore *          pSignalSemaphores    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkSubmitInfo onto its C++ wrapper type (C++20 builds only).
  template <>
  struct CppType<VkSubmitInfo>
  {
    using Type = SubmitInfo;
  };
#endif

  // Maps StructureType::eSubmitInfo back to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eSubmitInfo>
  {
    using Type = SubmitInfo;
  };

  // wrapper struct for struct VkSubmitInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubmitInfo2.html
  struct SubmitInfo2
  {
    using NativeType = VkSubmitInfo2;

    // Compile-time identification used by structure-chain machinery; sType member below must stay eSubmitInfo2.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSubmitInfo2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; counts and pointers default to zero/null, pNext to nullptr.
    VULKAN_HPP_CONSTEXPR SubmitInfo2( SubmitFlags                     flags_                    = {},
                                      uint32_t                        waitSemaphoreInfoCount_   = {},
                                      const SemaphoreSubmitInfo *     pWaitSemaphoreInfos_      = {},
                                      uint32_t                        commandBufferInfoCount_   = {},
                                      const CommandBufferSubmitInfo * pCommandBufferInfos_      = {},
                                      uint32_t                        signalSemaphoreInfoCount_ = {},
                                      const SemaphoreSubmitInfo *     pSignalSemaphoreInfos_    = {},
                                      const void *                    pNext_                    = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , waitSemaphoreInfoCount{ waitSemaphoreInfoCount_ }
      , pWaitSemaphoreInfos{ pWaitSemaphoreInfos_ }
      , commandBufferInfoCount{ commandBufferInfoCount_ }
      , pCommandBufferInfos{ pCommandBufferInfos_ }
      , signalSemaphoreInfoCount{ signalSemaphoreInfoCount_ }
      , pSignalSemaphoreInfos{ pSignalSemaphoreInfos_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SubmitInfo2( SubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the layout-compatible wrapper.
    SubmitInfo2( VkSubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : SubmitInfo2( *reinterpret_cast<SubmitInfo2 const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives all count members from the sizes of the given array proxies.
    // Unlike SubmitInfo, each array carries its own count, so no size cross-check is needed here.
    SubmitInfo2( SubmitFlags                                                    flags_,
                 ArrayProxyNoTemporaries<const SemaphoreSubmitInfo> const &     waitSemaphoreInfos_,
                 ArrayProxyNoTemporaries<const CommandBufferSubmitInfo> const & commandBufferInfos_   = {},
                 ArrayProxyNoTemporaries<const SemaphoreSubmitInfo> const &     signalSemaphoreInfos_ = {},
                 const void *                                                   pNext_                = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , waitSemaphoreInfoCount( static_cast<uint32_t>( waitSemaphoreInfos_.size() ) )
      , pWaitSemaphoreInfos( waitSemaphoreInfos_.data() )
      , commandBufferInfoCount( static_cast<uint32_t>( commandBufferInfos_.size() ) )
      , pCommandBufferInfos( commandBufferInfos_.data() )
      , signalSemaphoreInfoCount( static_cast<uint32_t>( signalSemaphoreInfos_.size() ) )
      , pSignalSemaphoreInfos( signalSemaphoreInfos_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SubmitInfo2 & operator=( SubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the layout-compatible wrapper view.
    SubmitInfo2 & operator=( VkSubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SubmitInfo2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setFlags( SubmitFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setWaitSemaphoreInfoCount( uint32_t waitSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreInfoCount = waitSemaphoreInfoCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPWaitSemaphoreInfos( const SemaphoreSubmitInfo * pWaitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      pWaitSemaphoreInfos = pWaitSemaphoreInfos_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both pointer and count from a single array proxy.
    SubmitInfo2 & setWaitSemaphoreInfos( ArrayProxyNoTemporaries<const SemaphoreSubmitInfo> const & waitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreInfoCount = static_cast<uint32_t>( waitSemaphoreInfos_.size() );
      pWaitSemaphoreInfos    = waitSemaphoreInfos_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setCommandBufferInfoCount( uint32_t commandBufferInfoCount_ ) VULKAN_HPP_NOEXCEPT
    {
      commandBufferInfoCount = commandBufferInfoCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPCommandBufferInfos( const CommandBufferSubmitInfo * pCommandBufferInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      pCommandBufferInfos = pCommandBufferInfos_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both pointer and count from a single array proxy.
    SubmitInfo2 & setCommandBufferInfos( ArrayProxyNoTemporaries<const CommandBufferSubmitInfo> const & commandBufferInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      commandBufferInfoCount = static_cast<uint32_t>( commandBufferInfos_.size() );
      pCommandBufferInfos    = commandBufferInfos_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setSignalSemaphoreInfoCount( uint32_t signalSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT
    {
      signalSemaphoreInfoCount = signalSemaphoreInfoCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPSignalSemaphoreInfos( const SemaphoreSubmitInfo * pSignalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      pSignalSemaphoreInfos = pSignalSemaphoreInfos_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both pointer and count from a single array proxy.
    SubmitInfo2 & setSignalSemaphoreInfos( ArrayProxyNoTemporaries<const SemaphoreSubmitInfo> const & signalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      signalSemaphoreInfoCount = static_cast<uint32_t>( signalSemaphoreInfos_.size() );
      pSignalSemaphoreInfos    = signalSemaphoreInfos_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to the C API type; rely on identical memory layout with VkSubmitInfo2.
    operator VkSubmitInfo2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSubmitInfo2 *>( this );
    }

    operator VkSubmitInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubmitInfo2 *>( this );
    }

    operator VkSubmitInfo2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSubmitInfo2 *>( this );
    }

    operator VkSubmitInfo2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSubmitInfo2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over every member, used for reflection-based comparison below.
    std::tuple<StructureType const &,
               const void * const &,
               SubmitFlags const &,
               uint32_t const &,
               const SemaphoreSubmitInfo * const &,
               uint32_t const &,
               const CommandBufferSubmitInfo * const &,
               uint32_t const &,
               const SemaphoreSubmitInfo * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       flags,
                       waitSemaphoreInfoCount,
                       pWaitSemaphoreInfos,
                       commandBufferInfoCount,
                       pCommandBufferInfos,
                       signalSemaphoreInfoCount,
                       pSignalSemaphoreInfos );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubmitInfo2 const & ) const = default;
#else
    // Member-wise equality; pointer members are compared by address, not by pointed-to contents.
    bool operator==( SubmitInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( waitSemaphoreInfoCount == rhs.waitSemaphoreInfoCount ) &&
             ( pWaitSemaphoreInfos == rhs.pWaitSemaphoreInfos ) && ( commandBufferInfoCount == rhs.commandBufferInfoCount ) &&
             ( pCommandBufferInfos == rhs.pCommandBufferInfos ) && ( signalSemaphoreInfoCount == rhs.signalSemaphoreInfoCount ) &&
             ( pSignalSemaphoreInfos == rhs.pSignalSemaphoreInfos );
#  endif
    }

    bool operator!=( SubmitInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkSubmitInfo2 one-for-one; order and types must not change (reinterpret_cast relies on it).
    StructureType                   sType                    = StructureType::eSubmitInfo2;
    const void *                    pNext                    = {};
    SubmitFlags                     flags                    = {};
    uint32_t                        waitSemaphoreInfoCount   = {};
    const SemaphoreSubmitInfo *     pWaitSemaphoreInfos      = {};
    uint32_t                        commandBufferInfoCount   = {};
    const CommandBufferSubmitInfo * pCommandBufferInfos      = {};
    uint32_t                        signalSemaphoreInfoCount = {};
    const SemaphoreSubmitInfo *     pSignalSemaphoreInfos    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkSubmitInfo2 onto its C++ wrapper type (C++20 builds only).
  template <>
  struct CppType<VkSubmitInfo2>
  {
    using Type = SubmitInfo2;
  };
#endif

  // Maps StructureType::eSubmitInfo2 back to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eSubmitInfo2>
  {
    using Type = SubmitInfo2;
  };

  // Backward-compatibility alias for the KHR-suffixed name.
  using SubmitInfo2KHR = SubmitInfo2;

  // wrapper struct for struct VkSubpassBeginInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassBeginInfo.html
  struct SubpassBeginInfo
  {
    using NativeType = VkSubpassBeginInfo;

    // Compile-time identification used by structure-chain machinery; sType member below must stay eSubpassBeginInfo.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSubpassBeginInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; contents defaults to SubpassContents::eInline.
    VULKAN_HPP_CONSTEXPR SubpassBeginInfo( SubpassContents contents_ = SubpassContents::eInline, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , contents{ contents_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SubpassBeginInfo( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the layout-compatible wrapper.
    SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassBeginInfo( *reinterpret_cast<SubpassBeginInfo const *>( &rhs ) ) {}

    SubpassBeginInfo & operator=( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the layout-compatible wrapper view.
    SubpassBeginInfo & operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SubpassBeginInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & setContents( SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT
    {
      contents = contents_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to the C API type; rely on identical memory layout with VkSubpassBeginInfo.
    operator VkSubpassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSubpassBeginInfo *>( this );
    }

    operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubpassBeginInfo *>( this );
    }

    operator VkSubpassBeginInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSubpassBeginInfo *>( this );
    }

    operator VkSubpassBeginInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSubpassBeginInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over every member, used for reflection-based comparison below.
    std::tuple<StructureType const &, const void * const &, SubpassContents const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, contents );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubpassBeginInfo const & ) const = default;
#else
    // Member-wise equality; pNext is compared by address, not by chained contents.
    bool operator==( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( contents == rhs.contents );
#  endif
    }

    bool operator!=( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkSubpassBeginInfo one-for-one; order and types must not change (reinterpret_cast relies on it).
    StructureType   sType    = StructureType::eSubpassBeginInfo;
    const void *    pNext    = {};
    SubpassContents contents = SubpassContents::eInline;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkSubpassBeginInfo onto its C++ wrapper type (C++20 builds only).
  template <>
  struct CppType<VkSubpassBeginInfo>
  {
    using Type = SubpassBeginInfo;
  };
#endif

  // Maps StructureType::eSubpassBeginInfo back to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eSubpassBeginInfo>
  {
    using Type = SubpassBeginInfo;
  };

  // Backward-compatibility alias for the KHR-suffixed name.
  using SubpassBeginInfoKHR = SubpassBeginInfo;

  // wrapper struct for struct VkSubpassDescriptionDepthStencilResolve, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassDescriptionDepthStencilResolve.html
  struct SubpassDescriptionDepthStencilResolve
  {
    using NativeType = VkSubpassDescriptionDepthStencilResolve;

    // Compile-time identification used by structure-chain machinery.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSubpassDescriptionDepthStencilResolve;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; both resolve modes default to ResolveModeFlagBits::eNone.
    VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( ResolveModeFlagBits          depthResolveMode_               = ResolveModeFlagBits::eNone,
                                                                ResolveModeFlagBits          stencilResolveMode_             = ResolveModeFlagBits::eNone,
                                                                const AttachmentReference2 * pDepthStencilResolveAttachment_ = {},
                                                                const void *                 pNext_                          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , depthResolveMode{ depthResolveMode_ }
      , stencilResolveMode{ stencilResolveMode_ }
      , pDepthStencilResolveAttachment{ pDepthStencilResolveAttachment_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the layout-compatible wrapper.
    SubpassDescriptionDepthStencilResolve( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
      : SubpassDescriptionDepthStencilResolve( *reinterpret_cast<SubpassDescriptionDepthStencilResolve const *>( &rhs ) )
    {
    }

    SubpassDescriptionDepthStencilResolve & operator=( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the layout-compatible wrapper view.
    SubpassDescriptionDepthStencilResolve & operator=( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SubpassDescriptionDepthStencilResolve const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters; each returns *this.
    VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setDepthResolveMode( ResolveModeFlagBits depthResolveMode_ ) VULKAN_HPP_NOEXCEPT
    {
      depthResolveMode = depthResolveMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setStencilResolveMode( ResolveModeFlagBits stencilResolveMode_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilResolveMode = stencilResolveMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve &
      setPDepthStencilResolveAttachment( const AttachmentReference2 * pDepthStencilResolveAttachment_ ) VULKAN_HPP_NOEXCEPT
    {
      pDepthStencilResolveAttachment = pDepthStencilResolveAttachment_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to the C API type; rely on identical memory layout.
    operator VkSubpassDescriptionDepthStencilResolve const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSubpassDescriptionDepthStencilResolve *>( this );
    }

    operator VkSubpassDescriptionDepthStencilResolve &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubpassDescriptionDepthStencilResolve *>( this );
    }

    operator VkSubpassDescriptionDepthStencilResolve const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSubpassDescriptionDepthStencilResolve *>( this );
    }

    operator VkSubpassDescriptionDepthStencilResolve *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSubpassDescriptionDepthStencilResolve *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over every member, used for reflection-based comparison below.
    std::tuple<StructureType const &, const void * const &, ResolveModeFlagBits const &, ResolveModeFlagBits const &, const AttachmentReference2 * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, depthResolveMode, stencilResolveMode, pDepthStencilResolveAttachment );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubpassDescriptionDepthStencilResolve const & ) const = default;
#else
    // Member-wise equality; pointer members are compared by address, not by pointed-to contents.
    bool operator==( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthResolveMode == rhs.depthResolveMode ) &&
             ( stencilResolveMode == rhs.stencilResolveMode ) && ( pDepthStencilResolveAttachment == rhs.pDepthStencilResolveAttachment );
#  endif
    }

    bool operator!=( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkSubpassDescriptionDepthStencilResolve one-for-one; order and types must not change.
    StructureType                sType                          = StructureType::eSubpassDescriptionDepthStencilResolve;
    const void *                 pNext                          = {};
    ResolveModeFlagBits          depthResolveMode               = ResolveModeFlagBits::eNone;
    ResolveModeFlagBits          stencilResolveMode             = ResolveModeFlagBits::eNone;
    const AttachmentReference2 * pDepthStencilResolveAttachment = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkSubpassDescriptionDepthStencilResolve onto its C++ wrapper type (C++20 builds only).
  template <>
  struct CppType<VkSubpassDescriptionDepthStencilResolve>
  {
    using Type = SubpassDescriptionDepthStencilResolve;
  };
#endif

  // Maps StructureType::eSubpassDescriptionDepthStencilResolve back to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eSubpassDescriptionDepthStencilResolve>
  {
    using Type = SubpassDescriptionDepthStencilResolve;
  };

  // Backward-compatibility alias for the KHR-suffixed name.
  using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve;

  // wrapper struct for struct VkSubpassEndInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassEndInfo.html
  struct SubpassEndInfo
  {
    using NativeType = VkSubpassEndInfo;

    // Compile-time identification used by structure-chain machinery; sType member below must stay eSubpassEndInfo.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSubpassEndInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // This struct carries no payload beyond the pNext chain.
    VULKAN_HPP_CONSTEXPR SubpassEndInfo( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } {}

    VULKAN_HPP_CONSTEXPR SubpassEndInfo( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by reinterpreting it as the layout-compatible wrapper.
    SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassEndInfo( *reinterpret_cast<SubpassEndInfo const *>( &rhs ) ) {}

    SubpassEndInfo & operator=( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the layout-compatible wrapper view.
    SubpassEndInfo & operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SubpassEndInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setter; returns *this.
    VULKAN_HPP_CONSTEXPR_14 SubpassEndInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reinterpreting conversions to the C API type; rely on identical memory layout with VkSubpassEndInfo.
    operator VkSubpassEndInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSubpassEndInfo *>( this );
    }

    operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubpassEndInfo *>( this );
    }

    operator VkSubpassEndInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSubpassEndInfo *>( this );
    }

    operator VkSubpassEndInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSubpassEndInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over every member, used for reflection-based comparison below.
    std::tuple<StructureType const &, const void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubpassEndInfo const & ) const = default;
#else
    // Member-wise equality; pNext is compared by address, not by chained contents.
    bool operator==( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
#  endif
    }

    bool operator!=( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkSubpassEndInfo one-for-one; order and types must not change (reinterpret_cast relies on it).
    StructureType sType = StructureType::eSubpassEndInfo;
    const void *  pNext = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkSubpassEndInfo onto its C++ wrapper type (C++20 builds only).
  template <>
  struct CppType<VkSubpassEndInfo>
  {
    using Type = SubpassEndInfo;
  };
#endif

  // Maps StructureType::eSubpassEndInfo back to the wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eSubpassEndInfo>
  {
    using Type = SubpassEndInfo;
  };

  // Backward-compatibility alias for the KHR-suffixed name.
  using SubpassEndInfoKHR = SubpassEndInfo;

  // wrapper struct for struct VkSubpassResolvePerformanceQueryEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassResolvePerformanceQueryEXT.html
  struct SubpassResolvePerformanceQueryEXT
  {
    using NativeType = VkSubpassResolvePerformanceQueryEXT;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSubpassResolvePerformanceQueryEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR SubpassResolvePerformanceQueryEXT( Bool32 optimal_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , optimal( optimal_ )
    {
    }

    VULKAN_HPP_CONSTEXPR SubpassResolvePerformanceQueryEXT( SubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible C struct.
    SubpassResolvePerformanceQueryEXT( VkSubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SubpassResolvePerformanceQueryEXT( *reinterpret_cast<SubpassResolvePerformanceQueryEXT const *>( &rhs ) )
    {
    }

    SubpassResolvePerformanceQueryEXT & operator=( SubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible C struct.
    SubpassResolvePerformanceQueryEXT & operator=( VkSubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      return *this = *reinterpret_cast<SubpassResolvePerformanceQueryEXT const *>( &rhs );
    }

    // Implicit conversions to the C struct, by reference and by pointer.
    operator VkSubpassResolvePerformanceQueryEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubpassResolvePerformanceQueryEXT const *>( this );
    }

    operator VkSubpassResolvePerformanceQueryEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubpassResolvePerformanceQueryEXT *>( this );
    }

    operator VkSubpassResolvePerformanceQueryEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSubpassResolvePerformanceQueryEXT const *>( this );
    }

    operator VkSubpassResolvePerformanceQueryEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSubpassResolvePerformanceQueryEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, for generic code.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, optimal );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubpassResolvePerformanceQueryEXT const & ) const = default;
#else
    bool operator==( SubpassResolvePerformanceQueryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( optimal == rhs.optimal );
#  endif
    }

    bool operator!=( SubpassResolvePerformanceQueryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType   = StructureType::eSubpassResolvePerformanceQueryEXT;
    void *        pNext   = {};
    Bool32        optimal = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper, for template-based lookup.
  template <>
  struct CppType<VkSubpassResolvePerformanceQueryEXT>
  {
    using Type = SubpassResolvePerformanceQueryEXT;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::eSubpassResolvePerformanceQueryEXT>
  {
    using Type = SubpassResolvePerformanceQueryEXT;
  };

  // wrapper struct for struct VkSubpassShadingPipelineCreateInfoHUAWEI, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassShadingPipelineCreateInfoHUAWEI.html
  struct SubpassShadingPipelineCreateInfoHUAWEI
  {
    using NativeType = VkSubpassShadingPipelineCreateInfoHUAWEI;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSubpassShadingPipelineCreateInfoHUAWEI;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR
      SubpassShadingPipelineCreateInfoHUAWEI( RenderPass renderPass_ = {}, uint32_t subpass_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , renderPass( renderPass_ )
      , subpass( subpass_ )
    {
    }

    VULKAN_HPP_CONSTEXPR SubpassShadingPipelineCreateInfoHUAWEI( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible C struct.
    SubpassShadingPipelineCreateInfoHUAWEI( VkSubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
      : SubpassShadingPipelineCreateInfoHUAWEI( *reinterpret_cast<SubpassShadingPipelineCreateInfoHUAWEI const *>( &rhs ) )
    {
    }

    SubpassShadingPipelineCreateInfoHUAWEI & operator=( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible C struct.
    SubpassShadingPipelineCreateInfoHUAWEI & operator=( VkSubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      return *this = *reinterpret_cast<SubpassShadingPipelineCreateInfoHUAWEI const *>( &rhs );
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 SubpassShadingPipelineCreateInfoHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassShadingPipelineCreateInfoHUAWEI & setRenderPass( RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
    {
      renderPass = renderPass_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SubpassShadingPipelineCreateInfoHUAWEI & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
    {
      subpass = subpass_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer.
    operator VkSubpassShadingPipelineCreateInfoHUAWEI const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubpassShadingPipelineCreateInfoHUAWEI const *>( this );
    }

    operator VkSubpassShadingPipelineCreateInfoHUAWEI &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubpassShadingPipelineCreateInfoHUAWEI *>( this );
    }

    operator VkSubpassShadingPipelineCreateInfoHUAWEI const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSubpassShadingPipelineCreateInfoHUAWEI const *>( this );
    }

    operator VkSubpassShadingPipelineCreateInfoHUAWEI *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSubpassShadingPipelineCreateInfoHUAWEI *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, for generic code.
    std::tuple<StructureType const &, void * const &, RenderPass const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, renderPass, subpass );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubpassShadingPipelineCreateInfoHUAWEI const & ) const = default;
#else
    bool operator==( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && ( subpass == rhs.subpass );
#  endif
    }

    bool operator!=( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType      = StructureType::eSubpassShadingPipelineCreateInfoHUAWEI;
    void *        pNext      = {};
    RenderPass    renderPass = {};
    uint32_t      subpass    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper, for template-based lookup.
  template <>
  struct CppType<VkSubpassShadingPipelineCreateInfoHUAWEI>
  {
    using Type = SubpassShadingPipelineCreateInfoHUAWEI;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::eSubpassShadingPipelineCreateInfoHUAWEI>
  {
    using Type = SubpassShadingPipelineCreateInfoHUAWEI;
  };

  // wrapper struct for struct VkSubresourceHostMemcpySize, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubresourceHostMemcpySize.html
  struct SubresourceHostMemcpySize
  {
    using NativeType = VkSubresourceHostMemcpySize;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSubresourceHostMemcpySize;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR SubresourceHostMemcpySize( DeviceSize size_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , size( size_ )
    {
    }

    VULKAN_HPP_CONSTEXPR SubresourceHostMemcpySize( SubresourceHostMemcpySize const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible C struct.
    SubresourceHostMemcpySize( VkSubresourceHostMemcpySize const & rhs ) VULKAN_HPP_NOEXCEPT
      : SubresourceHostMemcpySize( *reinterpret_cast<SubresourceHostMemcpySize const *>( &rhs ) )
    {
    }

    SubresourceHostMemcpySize & operator=( SubresourceHostMemcpySize const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible C struct.
    SubresourceHostMemcpySize & operator=( VkSubresourceHostMemcpySize const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      return *this = *reinterpret_cast<SubresourceHostMemcpySize const *>( &rhs );
    }

    // Implicit conversions to the C struct, by reference and by pointer.
    operator VkSubresourceHostMemcpySize const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubresourceHostMemcpySize const *>( this );
    }

    operator VkSubresourceHostMemcpySize &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubresourceHostMemcpySize *>( this );
    }

    operator VkSubresourceHostMemcpySize const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSubresourceHostMemcpySize const *>( this );
    }

    operator VkSubresourceHostMemcpySize *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSubresourceHostMemcpySize *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, for generic code.
    std::tuple<StructureType const &, void * const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, size );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubresourceHostMemcpySize const & ) const = default;
#else
    bool operator==( SubresourceHostMemcpySize const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( size == rhs.size );
#  endif
    }

    bool operator!=( SubresourceHostMemcpySize const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType = StructureType::eSubresourceHostMemcpySize;
    void *        pNext = {};
    DeviceSize    size  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper, for template-based lookup.
  template <>
  struct CppType<VkSubresourceHostMemcpySize>
  {
    using Type = SubresourceHostMemcpySize;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::eSubresourceHostMemcpySize>
  {
    using Type = SubresourceHostMemcpySize;
  };

  // Backwards-compatible alias for the EXT-suffixed name of this struct.
  using SubresourceHostMemcpySizeEXT = SubresourceHostMemcpySize;

  // wrapper struct for struct VkSubresourceLayout2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubresourceLayout2.html
  struct SubresourceLayout2
  {
    using NativeType = VkSubresourceLayout2;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSubresourceLayout2;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR SubresourceLayout2( SubresourceLayout subresourceLayout_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , subresourceLayout( subresourceLayout_ )
    {
    }

    VULKAN_HPP_CONSTEXPR SubresourceLayout2( SubresourceLayout2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible C struct.
    SubresourceLayout2( VkSubresourceLayout2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : SubresourceLayout2( *reinterpret_cast<SubresourceLayout2 const *>( &rhs ) )
    {
    }

    SubresourceLayout2 & operator=( SubresourceLayout2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible C struct.
    SubresourceLayout2 & operator=( VkSubresourceLayout2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      return *this = *reinterpret_cast<SubresourceLayout2 const *>( &rhs );
    }

    // Implicit conversions to the C struct, by reference and by pointer.
    operator VkSubresourceLayout2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubresourceLayout2 const *>( this );
    }

    operator VkSubresourceLayout2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubresourceLayout2 *>( this );
    }

    operator VkSubresourceLayout2 const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSubresourceLayout2 const *>( this );
    }

    operator VkSubresourceLayout2 *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSubresourceLayout2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, for generic code.
    std::tuple<StructureType const &, void * const &, SubresourceLayout const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, subresourceLayout );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubresourceLayout2 const & ) const = default;
#else
    bool operator==( SubresourceLayout2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subresourceLayout == rhs.subresourceLayout );
#  endif
    }

    bool operator!=( SubresourceLayout2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType     sType             = StructureType::eSubresourceLayout2;
    void *            pNext             = {};
    SubresourceLayout subresourceLayout = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper, for template-based lookup.
  template <>
  struct CppType<VkSubresourceLayout2>
  {
    using Type = SubresourceLayout2;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::eSubresourceLayout2>
  {
    using Type = SubresourceLayout2;
  };

  // Backwards-compatible aliases for the EXT- and KHR-suffixed names of this struct.
  using SubresourceLayout2EXT = SubresourceLayout2;
  using SubresourceLayout2KHR = SubresourceLayout2;

  // wrapper struct for struct VkSurfaceCapabilities2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceCapabilities2EXT.html
  struct SurfaceCapabilities2EXT
  {
    using NativeType = VkSurfaceCapabilities2EXT;

    // allowDuplicate: whether more than one instance of this struct may occur in a structure chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSurfaceCapabilities2EXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by its member initializer and is not a parameter.
    VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( uint32_t                    minImageCount_            = {},
                                                  uint32_t                    maxImageCount_            = {},
                                                  Extent2D                    currentExtent_            = {},
                                                  Extent2D                    minImageExtent_           = {},
                                                  Extent2D                    maxImageExtent_           = {},
                                                  uint32_t                    maxImageArrayLayers_      = {},
                                                  SurfaceTransformFlagsKHR    supportedTransforms_      = {},
                                                  SurfaceTransformFlagBitsKHR currentTransform_         = SurfaceTransformFlagBitsKHR::eIdentity,
                                                  CompositeAlphaFlagsKHR      supportedCompositeAlpha_  = {},
                                                  ImageUsageFlags             supportedUsageFlags_      = {},
                                                  SurfaceCounterFlagsEXT      supportedSurfaceCounters_ = {},
                                                  void *                      pNext_                    = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , minImageCount{ minImageCount_ }
      , maxImageCount{ maxImageCount_ }
      , currentExtent{ currentExtent_ }
      , minImageExtent{ minImageExtent_ }
      , maxImageExtent{ maxImageExtent_ }
      , maxImageArrayLayers{ maxImageArrayLayers_ }
      , supportedTransforms{ supportedTransforms_ }
      , currentTransform{ currentTransform_ }
      , supportedCompositeAlpha{ supportedCompositeAlpha_ }
      , supportedUsageFlags{ supportedUsageFlags_ }
      , supportedSurfaceCounters{ supportedSurfaceCounters_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; the wrapper is layout-compatible, so a reinterpret_cast suffices.
    SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SurfaceCapabilities2EXT( *reinterpret_cast<SurfaceCapabilities2EXT const *>( &rhs ) )
    {
    }

    SurfaceCapabilities2EXT & operator=( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct, again relying on layout compatibility.
    SurfaceCapabilities2EXT & operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SurfaceCapabilities2EXT const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C struct, by reference and by pointer.
    operator VkSurfaceCapabilities2EXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfaceCapabilities2EXT *>( this );
    }

    operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceCapabilities2EXT *>( this );
    }

    operator VkSurfaceCapabilities2EXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSurfaceCapabilities2EXT *>( this );
    }

    operator VkSurfaceCapabilities2EXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSurfaceCapabilities2EXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, for generic code.
    std::tuple<StructureType const &,
               void * const &,
               uint32_t const &,
               uint32_t const &,
               Extent2D const &,
               Extent2D const &,
               Extent2D const &,
               uint32_t const &,
               SurfaceTransformFlagsKHR const &,
               SurfaceTransformFlagBitsKHR const &,
               CompositeAlphaFlagsKHR const &,
               ImageUsageFlags const &,
               SurfaceCounterFlagsEXT const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       minImageCount,
                       maxImageCount,
                       currentExtent,
                       minImageExtent,
                       maxImageExtent,
                       maxImageArrayLayers,
                       supportedTransforms,
                       currentTransform,
                       supportedCompositeAlpha,
                       supportedUsageFlags,
                       supportedSurfaceCounters );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceCapabilities2EXT const & ) const = default;
#else
    // Member-wise equality over every field, including sType and pNext.
    bool operator==( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minImageCount == rhs.minImageCount ) && ( maxImageCount == rhs.maxImageCount ) &&
             ( currentExtent == rhs.currentExtent ) && ( minImageExtent == rhs.minImageExtent ) && ( maxImageExtent == rhs.maxImageExtent ) &&
             ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && ( supportedTransforms == rhs.supportedTransforms ) &&
             ( currentTransform == rhs.currentTransform ) && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) &&
             ( supportedUsageFlags == rhs.supportedUsageFlags ) && ( supportedSurfaceCounters == rhs.supportedSurfaceCounters );
#  endif
    }

    bool operator!=( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType               sType                    = StructureType::eSurfaceCapabilities2EXT;
    void *                      pNext                    = {};
    uint32_t                    minImageCount            = {};
    uint32_t                    maxImageCount            = {};
    Extent2D                    currentExtent            = {};
    Extent2D                    minImageExtent           = {};
    Extent2D                    maxImageExtent           = {};
    uint32_t                    maxImageArrayLayers      = {};
    SurfaceTransformFlagsKHR    supportedTransforms      = {};
    SurfaceTransformFlagBitsKHR currentTransform         = SurfaceTransformFlagBitsKHR::eIdentity;
    CompositeAlphaFlagsKHR      supportedCompositeAlpha  = {};
    ImageUsageFlags             supportedUsageFlags      = {};
    SurfaceCounterFlagsEXT      supportedSurfaceCounters = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper, for template-based lookup.
  template <>
  struct CppType<VkSurfaceCapabilities2EXT>
  {
    using Type = SurfaceCapabilities2EXT;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::eSurfaceCapabilities2EXT>
  {
    using Type = SurfaceCapabilities2EXT;
  };

  // wrapper struct for struct VkSurfaceCapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceCapabilitiesKHR.html
  // Note: this struct has no sType/pNext members and hence no structureType/allowDuplicate traits.
  struct SurfaceCapabilitiesKHR
  {
    using NativeType = VkSurfaceCapabilitiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor.
    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( uint32_t                    minImageCount_           = {},
                                                 uint32_t                    maxImageCount_           = {},
                                                 Extent2D                    currentExtent_           = {},
                                                 Extent2D                    minImageExtent_          = {},
                                                 Extent2D                    maxImageExtent_          = {},
                                                 uint32_t                    maxImageArrayLayers_     = {},
                                                 SurfaceTransformFlagsKHR    supportedTransforms_     = {},
                                                 SurfaceTransformFlagBitsKHR currentTransform_        = SurfaceTransformFlagBitsKHR::eIdentity,
                                                 CompositeAlphaFlagsKHR      supportedCompositeAlpha_ = {},
                                                 ImageUsageFlags             supportedUsageFlags_     = {} ) VULKAN_HPP_NOEXCEPT
      : minImageCount{ minImageCount_ }
      , maxImageCount{ maxImageCount_ }
      , currentExtent{ currentExtent_ }
      , minImageExtent{ minImageExtent_ }
      , maxImageExtent{ maxImageExtent_ }
      , maxImageArrayLayers{ maxImageArrayLayers_ }
      , supportedTransforms{ supportedTransforms_ }
      , currentTransform{ currentTransform_ }
      , supportedCompositeAlpha{ supportedCompositeAlpha_ }
      , supportedUsageFlags{ supportedUsageFlags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; the wrapper is layout-compatible, so a reinterpret_cast suffices.
    SurfaceCapabilitiesKHR( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SurfaceCapabilitiesKHR( *reinterpret_cast<SurfaceCapabilitiesKHR const *>( &rhs ) )
    {
    }

    SurfaceCapabilitiesKHR & operator=( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct, again relying on layout compatibility.
    SurfaceCapabilitiesKHR & operator=( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SurfaceCapabilitiesKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C struct, by reference and by pointer.
    operator VkSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfaceCapabilitiesKHR *>( this );
    }

    operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceCapabilitiesKHR *>( this );
    }

    operator VkSurfaceCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSurfaceCapabilitiesKHR *>( this );
    }

    operator VkSurfaceCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSurfaceCapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, for generic code.
    std::tuple<uint32_t const &,
               uint32_t const &,
               Extent2D const &,
               Extent2D const &,
               Extent2D const &,
               uint32_t const &,
               SurfaceTransformFlagsKHR const &,
               SurfaceTransformFlagBitsKHR const &,
               CompositeAlphaFlagsKHR const &,
               ImageUsageFlags const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( minImageCount,
                       maxImageCount,
                       currentExtent,
                       minImageExtent,
                       maxImageExtent,
                       maxImageArrayLayers,
                       supportedTransforms,
                       currentTransform,
                       supportedCompositeAlpha,
                       supportedUsageFlags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceCapabilitiesKHR const & ) const = default;
#else
    // Member-wise equality over every field.
    bool operator==( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( minImageCount == rhs.minImageCount ) && ( maxImageCount == rhs.maxImageCount ) && ( currentExtent == rhs.currentExtent ) &&
             ( minImageExtent == rhs.minImageExtent ) && ( maxImageExtent == rhs.maxImageExtent ) && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) &&
             ( supportedTransforms == rhs.supportedTransforms ) && ( currentTransform == rhs.currentTransform ) &&
             ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) && ( supportedUsageFlags == rhs.supportedUsageFlags );
#  endif
    }

    bool operator!=( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t                    minImageCount           = {};
    uint32_t                    maxImageCount           = {};
    Extent2D                    currentExtent           = {};
    Extent2D                    minImageExtent          = {};
    Extent2D                    maxImageExtent          = {};
    uint32_t                    maxImageArrayLayers     = {};
    SurfaceTransformFlagsKHR    supportedTransforms     = {};
    SurfaceTransformFlagBitsKHR currentTransform        = SurfaceTransformFlagBitsKHR::eIdentity;
    CompositeAlphaFlagsKHR      supportedCompositeAlpha = {};
    ImageUsageFlags             supportedUsageFlags     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper, for template-based lookup.
  // No StructureType specialization exists here: SurfaceCapabilitiesKHR has no sType member.
  template <>
  struct CppType<VkSurfaceCapabilitiesKHR>
  {
    using Type = SurfaceCapabilitiesKHR;
  };
#endif

  // wrapper struct for struct VkSurfaceCapabilities2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceCapabilities2KHR.html
  struct SurfaceCapabilities2KHR
  {
    using NativeType = VkSurfaceCapabilities2KHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSurfaceCapabilities2KHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( SurfaceCapabilitiesKHR surfaceCapabilities_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , surfaceCapabilities( surfaceCapabilities_ )
    {
    }

    VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the layout-compatible C struct.
    SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SurfaceCapabilities2KHR( *reinterpret_cast<SurfaceCapabilities2KHR const *>( &rhs ) )
    {
    }

    SurfaceCapabilities2KHR & operator=( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the layout-compatible C struct.
    SurfaceCapabilities2KHR & operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      return *this = *reinterpret_cast<SurfaceCapabilities2KHR const *>( &rhs );
    }

    // Implicit conversions to the C struct, by reference and by pointer.
    operator VkSurfaceCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceCapabilities2KHR const *>( this );
    }

    operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceCapabilities2KHR *>( this );
    }

    operator VkSurfaceCapabilities2KHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSurfaceCapabilities2KHR const *>( this );
    }

    operator VkSurfaceCapabilities2KHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSurfaceCapabilities2KHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, for generic code.
    std::tuple<StructureType const &, void * const &, SurfaceCapabilitiesKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, surfaceCapabilities );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceCapabilities2KHR const & ) const = default;
#else
    bool operator==( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceCapabilities == rhs.surfaceCapabilities );
#  endif
    }

    bool operator!=( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType          sType               = StructureType::eSurfaceCapabilities2KHR;
    void *                 pNext               = {};
    SurfaceCapabilitiesKHR surfaceCapabilities = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper, for template-based lookup.
  template <>
  struct CppType<VkSurfaceCapabilities2KHR>
  {
    using Type = SurfaceCapabilities2KHR;
  };
#endif

  // Maps the StructureType enumerant to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::eSurfaceCapabilities2KHR>
  {
    using Type = SurfaceCapabilities2KHR;
  };

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  // wrapper struct for struct VkSurfaceCapabilitiesFullScreenExclusiveEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceCapabilitiesFullScreenExclusiveEXT.html
  struct SurfaceCapabilitiesFullScreenExclusiveEXT
  {
    using NativeType = VkSurfaceCapabilitiesFullScreenExclusiveEXT;

    // Chain metadata consumed by vulkan.hpp's structure-chain machinery (defined elsewhere):
    // structureType mirrors the sType member below; allowDuplicate == false presumably means
    // this struct may appear at most once per chain -- the consumer is not visible in this chunk.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; pNext comes last so data members can be passed positionally.
    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( Bool32 fullScreenExclusiveSupported_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , fullScreenExclusiveSupported{ fullScreenExclusiveSupported_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct: relies on this wrapper being layout-compatible
    // with VkSurfaceCapabilitiesFullScreenExclusiveEXT (identical members in the same order).
    SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SurfaceCapabilitiesFullScreenExclusiveEXT( *reinterpret_cast<SurfaceCapabilitiesFullScreenExclusiveEXT const *>( &rhs ) )
    {
    }

    SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as above).
    // NOTE: no member setters are generated for this struct; it is presumably an output-only
    // ("returnedonly") structure filled in by the implementation -- confirm against the registry.
    SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SurfaceCapabilitiesFullScreenExclusiveEXT const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type: reinterpret this object in place (no copy),
    // so the wrapper can be handed directly to the C API.
    operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfaceCapabilitiesFullScreenExclusiveEXT *>( this );
    }

    operator VkSurfaceCapabilitiesFullScreenExclusiveEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceCapabilitiesFullScreenExclusiveEXT *>( this );
    }

    operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSurfaceCapabilitiesFullScreenExclusiveEXT *>( this );
    }

    operator VkSurfaceCapabilitiesFullScreenExclusiveEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSurfaceCapabilitiesFullScreenExclusiveEXT *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; enables the tuple-wise comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, fullScreenExclusiveSupported );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceCapabilitiesFullScreenExclusiveEXT const & ) const = default;
#  else
    // Member-wise equality (pNext is compared as a raw pointer, not deep-compared).
    bool operator==( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fullScreenExclusiveSupported == rhs.fullScreenExclusiveSupported );
#    endif
    }

    bool operator!=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // sType must keep its default so the struct is recognized when chained through pNext.
    StructureType sType                        = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
    void *        pNext                        = {};
    Bool32        fullScreenExclusiveSupported = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper type (used by generic code; C++20 and up only).
  template <>
  struct CppType<VkSurfaceCapabilitiesFullScreenExclusiveEXT>
  {
    using Type = SurfaceCapabilitiesFullScreenExclusiveEXT;
  };
#  endif

  // Maps the StructureType enum value to the corresponding wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT>
  {
    using Type = SurfaceCapabilitiesFullScreenExclusiveEXT;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

  // wrapper struct for struct VkSurfaceCapabilitiesPresentBarrierNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceCapabilitiesPresentBarrierNV.html
  struct SurfaceCapabilitiesPresentBarrierNV
  {
    using NativeType = VkSurfaceCapabilitiesPresentBarrierNV;

    // Chain metadata consumed by vulkan.hpp's structure-chain machinery (defined elsewhere);
    // structureType mirrors the sType member below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSurfaceCapabilitiesPresentBarrierNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; pNext comes last so data members can be passed positionally.
    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesPresentBarrierNV( Bool32 presentBarrierSupported_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , presentBarrierSupported{ presentBarrierSupported_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesPresentBarrierNV( SurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct: relies on this wrapper being layout-compatible
    // with VkSurfaceCapabilitiesPresentBarrierNV (identical members in the same order).
    SurfaceCapabilitiesPresentBarrierNV( VkSurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : SurfaceCapabilitiesPresentBarrierNV( *reinterpret_cast<SurfaceCapabilitiesPresentBarrierNV const *>( &rhs ) )
    {
    }

    SurfaceCapabilitiesPresentBarrierNV & operator=( SurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as above).
    // NOTE: no member setters are generated for this struct; presumably an output-only
    // ("returnedonly") structure filled in by the implementation -- confirm against the registry.
    SurfaceCapabilitiesPresentBarrierNV & operator=( VkSurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SurfaceCapabilitiesPresentBarrierNV const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type: reinterpret this object in place (no copy).
    operator VkSurfaceCapabilitiesPresentBarrierNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfaceCapabilitiesPresentBarrierNV *>( this );
    }

    operator VkSurfaceCapabilitiesPresentBarrierNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceCapabilitiesPresentBarrierNV *>( this );
    }

    operator VkSurfaceCapabilitiesPresentBarrierNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSurfaceCapabilitiesPresentBarrierNV *>( this );
    }

    operator VkSurfaceCapabilitiesPresentBarrierNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSurfaceCapabilitiesPresentBarrierNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; enables the tuple-wise comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, presentBarrierSupported );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceCapabilitiesPresentBarrierNV const & ) const = default;
#else
    // Member-wise equality (pNext is compared as a raw pointer, not deep-compared).
    bool operator==( SurfaceCapabilitiesPresentBarrierNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentBarrierSupported == rhs.presentBarrierSupported );
#  endif
    }

    bool operator!=( SurfaceCapabilitiesPresentBarrierNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType must keep its default so the struct is recognized when chained through pNext.
    StructureType sType                   = StructureType::eSurfaceCapabilitiesPresentBarrierNV;
    void *        pNext                   = {};
    Bool32        presentBarrierSupported = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper type (used by generic code; C++20 and up only).
  template <>
  struct CppType<VkSurfaceCapabilitiesPresentBarrierNV>
  {
    using Type = SurfaceCapabilitiesPresentBarrierNV;
  };
#endif

  // Maps the StructureType enum value to the corresponding wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eSurfaceCapabilitiesPresentBarrierNV>
  {
    using Type = SurfaceCapabilitiesPresentBarrierNV;
  };

  // wrapper struct for struct VkSurfaceCapabilitiesPresentId2KHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceCapabilitiesPresentId2KHR.html
  struct SurfaceCapabilitiesPresentId2KHR
  {
    using NativeType = VkSurfaceCapabilitiesPresentId2KHR;

    // Chain metadata consumed by vulkan.hpp's structure-chain machinery (defined elsewhere);
    // structureType mirrors the sType member below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSurfaceCapabilitiesPresentId2KHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; pNext comes last so data members can be passed positionally.
    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesPresentId2KHR( Bool32 presentId2Supported_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , presentId2Supported{ presentId2Supported_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesPresentId2KHR( SurfaceCapabilitiesPresentId2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct: relies on this wrapper being layout-compatible
    // with VkSurfaceCapabilitiesPresentId2KHR (identical members in the same order).
    SurfaceCapabilitiesPresentId2KHR( VkSurfaceCapabilitiesPresentId2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SurfaceCapabilitiesPresentId2KHR( *reinterpret_cast<SurfaceCapabilitiesPresentId2KHR const *>( &rhs ) )
    {
    }

    SurfaceCapabilitiesPresentId2KHR & operator=( SurfaceCapabilitiesPresentId2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as above).
    SurfaceCapabilitiesPresentId2KHR & operator=( VkSurfaceCapabilitiesPresentId2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SurfaceCapabilitiesPresentId2KHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so members can be set fluently.
    VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesPresentId2KHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesPresentId2KHR & setPresentId2Supported( Bool32 presentId2Supported_ ) VULKAN_HPP_NOEXCEPT
    {
      presentId2Supported = presentId2Supported_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type: reinterpret this object in place (no copy).
    operator VkSurfaceCapabilitiesPresentId2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfaceCapabilitiesPresentId2KHR *>( this );
    }

    operator VkSurfaceCapabilitiesPresentId2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceCapabilitiesPresentId2KHR *>( this );
    }

    operator VkSurfaceCapabilitiesPresentId2KHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSurfaceCapabilitiesPresentId2KHR *>( this );
    }

    operator VkSurfaceCapabilitiesPresentId2KHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSurfaceCapabilitiesPresentId2KHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; enables the tuple-wise comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, presentId2Supported );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceCapabilitiesPresentId2KHR const & ) const = default;
#else
    // Member-wise equality (pNext is compared as a raw pointer, not deep-compared).
    bool operator==( SurfaceCapabilitiesPresentId2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentId2Supported == rhs.presentId2Supported );
#  endif
    }

    bool operator!=( SurfaceCapabilitiesPresentId2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType must keep its default so the struct is recognized when chained through pNext.
    StructureType sType               = StructureType::eSurfaceCapabilitiesPresentId2KHR;
    void *        pNext               = {};
    Bool32        presentId2Supported = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper type (used by generic code; C++20 and up only).
  template <>
  struct CppType<VkSurfaceCapabilitiesPresentId2KHR>
  {
    using Type = SurfaceCapabilitiesPresentId2KHR;
  };
#endif

  // Maps the StructureType enum value to the corresponding wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eSurfaceCapabilitiesPresentId2KHR>
  {
    using Type = SurfaceCapabilitiesPresentId2KHR;
  };

  // wrapper struct for struct VkSurfaceCapabilitiesPresentWait2KHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceCapabilitiesPresentWait2KHR.html
  struct SurfaceCapabilitiesPresentWait2KHR
  {
    using NativeType = VkSurfaceCapabilitiesPresentWait2KHR;

    // Chain metadata consumed by vulkan.hpp's structure-chain machinery (defined elsewhere);
    // structureType mirrors the sType member below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSurfaceCapabilitiesPresentWait2KHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; pNext comes last so data members can be passed positionally.
    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesPresentWait2KHR( Bool32 presentWait2Supported_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , presentWait2Supported{ presentWait2Supported_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesPresentWait2KHR( SurfaceCapabilitiesPresentWait2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct: relies on this wrapper being layout-compatible
    // with VkSurfaceCapabilitiesPresentWait2KHR (identical members in the same order).
    SurfaceCapabilitiesPresentWait2KHR( VkSurfaceCapabilitiesPresentWait2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SurfaceCapabilitiesPresentWait2KHR( *reinterpret_cast<SurfaceCapabilitiesPresentWait2KHR const *>( &rhs ) )
    {
    }

    SurfaceCapabilitiesPresentWait2KHR & operator=( SurfaceCapabilitiesPresentWait2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as above).
    SurfaceCapabilitiesPresentWait2KHR & operator=( VkSurfaceCapabilitiesPresentWait2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SurfaceCapabilitiesPresentWait2KHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so members can be set fluently.
    VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesPresentWait2KHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesPresentWait2KHR & setPresentWait2Supported( Bool32 presentWait2Supported_ ) VULKAN_HPP_NOEXCEPT
    {
      presentWait2Supported = presentWait2Supported_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type: reinterpret this object in place (no copy).
    operator VkSurfaceCapabilitiesPresentWait2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfaceCapabilitiesPresentWait2KHR *>( this );
    }

    operator VkSurfaceCapabilitiesPresentWait2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceCapabilitiesPresentWait2KHR *>( this );
    }

    operator VkSurfaceCapabilitiesPresentWait2KHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSurfaceCapabilitiesPresentWait2KHR *>( this );
    }

    operator VkSurfaceCapabilitiesPresentWait2KHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSurfaceCapabilitiesPresentWait2KHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; enables the tuple-wise comparison below.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, presentWait2Supported );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceCapabilitiesPresentWait2KHR const & ) const = default;
#else
    // Member-wise equality (pNext is compared as a raw pointer, not deep-compared).
    bool operator==( SurfaceCapabilitiesPresentWait2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentWait2Supported == rhs.presentWait2Supported );
#  endif
    }

    bool operator!=( SurfaceCapabilitiesPresentWait2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType must keep its default so the struct is recognized when chained through pNext.
    StructureType sType                 = StructureType::eSurfaceCapabilitiesPresentWait2KHR;
    void *        pNext                 = {};
    Bool32        presentWait2Supported = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper type (used by generic code; C++20 and up only).
  template <>
  struct CppType<VkSurfaceCapabilitiesPresentWait2KHR>
  {
    using Type = SurfaceCapabilitiesPresentWait2KHR;
  };
#endif

  // Maps the StructureType enum value to the corresponding wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eSurfaceCapabilitiesPresentWait2KHR>
  {
    using Type = SurfaceCapabilitiesPresentWait2KHR;
  };

#if defined( VK_USE_PLATFORM_OHOS )
  // wrapper struct for struct VkSurfaceCreateInfoOHOS, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceCreateInfoOHOS.html
  struct SurfaceCreateInfoOHOS
  {
    using NativeType = VkSurfaceCreateInfoOHOS;

    // Chain metadata consumed by vulkan.hpp's structure-chain machinery (defined elsewhere);
    // structureType mirrors the sType member below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSurfaceCreateInfoOHOS;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; pNext comes last so data members can be passed positionally.
    VULKAN_HPP_CONSTEXPR
      SurfaceCreateInfoOHOS( SurfaceCreateFlagsOHOS flags_ = {}, OHNativeWindow * window_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , window{ window_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SurfaceCreateInfoOHOS( SurfaceCreateInfoOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct: relies on this wrapper being layout-compatible
    // with VkSurfaceCreateInfoOHOS (identical members in the same order).
    SurfaceCreateInfoOHOS( VkSurfaceCreateInfoOHOS const & rhs ) VULKAN_HPP_NOEXCEPT
      : SurfaceCreateInfoOHOS( *reinterpret_cast<SurfaceCreateInfoOHOS const *>( &rhs ) )
    {
    }

    SurfaceCreateInfoOHOS & operator=( SurfaceCreateInfoOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as above).
    SurfaceCreateInfoOHOS & operator=( VkSurfaceCreateInfoOHOS const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SurfaceCreateInfoOHOS const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so members can be set fluently.
    VULKAN_HPP_CONSTEXPR_14 SurfaceCreateInfoOHOS & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SurfaceCreateInfoOHOS & setFlags( SurfaceCreateFlagsOHOS flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SurfaceCreateInfoOHOS & setWindow( OHNativeWindow * window_ ) VULKAN_HPP_NOEXCEPT
    {
      window = window_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type: reinterpret this object in place (no copy).
    operator VkSurfaceCreateInfoOHOS const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfaceCreateInfoOHOS *>( this );
    }

    operator VkSurfaceCreateInfoOHOS &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceCreateInfoOHOS *>( this );
    }

    operator VkSurfaceCreateInfoOHOS const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSurfaceCreateInfoOHOS *>( this );
    }

    operator VkSurfaceCreateInfoOHOS *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSurfaceCreateInfoOHOS *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; enables the tuple-wise comparison below.
    std::tuple<StructureType const &, const void * const &, SurfaceCreateFlagsOHOS const &, OHNativeWindow * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, window );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceCreateInfoOHOS const & ) const = default;
#  else
    // Member-wise equality (pNext and window are compared as raw pointers, not deep-compared).
    bool operator==( SurfaceCreateInfoOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( window == rhs.window );
#    endif
    }

    bool operator!=( SurfaceCreateInfoOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // sType must keep its default so the struct is recognized when chained through pNext.
    StructureType          sType  = StructureType::eSurfaceCreateInfoOHOS;
    const void *           pNext  = {};
    SurfaceCreateFlagsOHOS flags  = {};
    OHNativeWindow *       window = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper type (used by generic code; C++20 and up only).
  template <>
  struct CppType<VkSurfaceCreateInfoOHOS>
  {
    using Type = SurfaceCreateInfoOHOS;
  };
#  endif

  // Maps the StructureType enum value to the corresponding wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eSurfaceCreateInfoOHOS>
  {
    using Type = SurfaceCreateInfoOHOS;
  };
#endif /*VK_USE_PLATFORM_OHOS*/

  // wrapper struct for struct VkSurfaceFormatKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceFormatKHR.html
  struct SurfaceFormatKHR
  {
    using NativeType = VkSurfaceFormatKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Plain (non-sType) struct: no pNext chain, just a format / color-space pair.
    VULKAN_HPP_CONSTEXPR SurfaceFormatKHR( Format format_ = Format::eUndefined, ColorSpaceKHR colorSpace_ = ColorSpaceKHR::eSrgbNonlinear ) VULKAN_HPP_NOEXCEPT
      : format{ format_ }
      , colorSpace{ colorSpace_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SurfaceFormatKHR( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct: relies on this wrapper being layout-compatible
    // with VkSurfaceFormatKHR (identical members in the same order).
    SurfaceFormatKHR( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT : SurfaceFormatKHR( *reinterpret_cast<SurfaceFormatKHR const *>( &rhs ) ) {}

    SurfaceFormatKHR & operator=( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as above).
    SurfaceFormatKHR & operator=( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SurfaceFormatKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type: reinterpret this object in place (no copy).
    operator VkSurfaceFormatKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfaceFormatKHR *>( this );
    }

    operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceFormatKHR *>( this );
    }

    operator VkSurfaceFormatKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSurfaceFormatKHR *>( this );
    }

    operator VkSurfaceFormatKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSurfaceFormatKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; enables the tuple-wise comparison below.
    std::tuple<Format const &, ColorSpaceKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( format, colorSpace );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceFormatKHR const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( format == rhs.format ) && ( colorSpace == rhs.colorSpace );
#  endif
    }

    bool operator!=( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    Format        format     = Format::eUndefined;
    ColorSpaceKHR colorSpace = ColorSpaceKHR::eSrgbNonlinear;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper type (used by generic code; C++20 and up only).
  // No StructureType specialization here: VkSurfaceFormatKHR has no sType member.
  template <>
  struct CppType<VkSurfaceFormatKHR>
  {
    using Type = SurfaceFormatKHR;
  };
#endif

  // wrapper struct for struct VkSurfaceFormat2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceFormat2KHR.html
  struct SurfaceFormat2KHR
  {
    using NativeType = VkSurfaceFormat2KHR;

    // Chain metadata consumed by vulkan.hpp's structure-chain machinery (defined elsewhere);
    // structureType mirrors the sType member below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSurfaceFormat2KHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; pNext comes last so data members can be passed positionally.
    VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( SurfaceFormatKHR surfaceFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , surfaceFormat{ surfaceFormat_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct: relies on this wrapper being layout-compatible
    // with VkSurfaceFormat2KHR (identical members in the same order).
    SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT : SurfaceFormat2KHR( *reinterpret_cast<SurfaceFormat2KHR const *>( &rhs ) ) {}

    SurfaceFormat2KHR & operator=( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as above).
    SurfaceFormat2KHR & operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SurfaceFormat2KHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type: reinterpret this object in place (no copy).
    operator VkSurfaceFormat2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfaceFormat2KHR *>( this );
    }

    operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceFormat2KHR *>( this );
    }

    operator VkSurfaceFormat2KHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSurfaceFormat2KHR *>( this );
    }

    operator VkSurfaceFormat2KHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSurfaceFormat2KHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; enables the tuple-wise comparison below.
    std::tuple<StructureType const &, void * const &, SurfaceFormatKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, surfaceFormat );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceFormat2KHR const & ) const = default;
#else
    // Member-wise equality (pNext is compared as a raw pointer, not deep-compared).
    bool operator==( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceFormat == rhs.surfaceFormat );
#  endif
    }

    bool operator!=( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType must keep its default so the struct is recognized when chained through pNext.
    StructureType    sType         = StructureType::eSurfaceFormat2KHR;
    void *           pNext         = {};
    SurfaceFormatKHR surfaceFormat = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper type (used by generic code; C++20 and up only).
  template <>
  struct CppType<VkSurfaceFormat2KHR>
  {
    using Type = SurfaceFormat2KHR;
  };
#endif

  // Maps the StructureType enum value to the corresponding wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eSurfaceFormat2KHR>
  {
    using Type = SurfaceFormat2KHR;
  };

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  // wrapper struct for struct VkSurfaceFullScreenExclusiveInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceFullScreenExclusiveInfoEXT.html
  struct SurfaceFullScreenExclusiveInfoEXT
  {
    using NativeType = VkSurfaceFullScreenExclusiveInfoEXT;

    // Chain metadata consumed by vulkan.hpp's structure-chain machinery (defined elsewhere);
    // structureType mirrors the sType member below.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSurfaceFullScreenExclusiveInfoEXT;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; pNext comes last so data members can be passed positionally.
    VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( FullScreenExclusiveEXT fullScreenExclusive_ = FullScreenExclusiveEXT::eDefault,
                                                            void *                 pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , fullScreenExclusive{ fullScreenExclusive_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct: relies on this wrapper being layout-compatible
    // with VkSurfaceFullScreenExclusiveInfoEXT (identical members in the same order).
    SurfaceFullScreenExclusiveInfoEXT( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SurfaceFullScreenExclusiveInfoEXT( *reinterpret_cast<SurfaceFullScreenExclusiveInfoEXT const *>( &rhs ) )
    {
    }

    SurfaceFullScreenExclusiveInfoEXT & operator=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as above).
    SurfaceFullScreenExclusiveInfoEXT & operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SurfaceFullScreenExclusiveInfoEXT const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so members can be set fluently.
    VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveInfoEXT & setFullScreenExclusive( FullScreenExclusiveEXT fullScreenExclusive_ ) VULKAN_HPP_NOEXCEPT
    {
      fullScreenExclusive = fullScreenExclusive_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type: reinterpret this object in place (no copy).
    operator VkSurfaceFullScreenExclusiveInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfaceFullScreenExclusiveInfoEXT *>( this );
    }

    operator VkSurfaceFullScreenExclusiveInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceFullScreenExclusiveInfoEXT *>( this );
    }

    operator VkSurfaceFullScreenExclusiveInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSurfaceFullScreenExclusiveInfoEXT *>( this );
    }

    operator VkSurfaceFullScreenExclusiveInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSurfaceFullScreenExclusiveInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references; enables the tuple-wise comparison below.
    std::tuple<StructureType const &, void * const &, FullScreenExclusiveEXT const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, fullScreenExclusive );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceFullScreenExclusiveInfoEXT const & ) const = default;
#  else
    // Member-wise equality (pNext is compared as a raw pointer, not deep-compared).
    bool operator==( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fullScreenExclusive == rhs.fullScreenExclusive );
#    endif
    }

    bool operator!=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // sType must keep its default so the struct is recognized when chained through pNext.
    StructureType          sType               = StructureType::eSurfaceFullScreenExclusiveInfoEXT;
    void *                 pNext               = {};
    FullScreenExclusiveEXT fullScreenExclusive = FullScreenExclusiveEXT::eDefault;
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper type (used by generic code; C++20 and up only).
  template <>
  struct CppType<VkSurfaceFullScreenExclusiveInfoEXT>
  {
    using Type = SurfaceFullScreenExclusiveInfoEXT;
  };
#  endif

  // Maps the StructureType enum value to the corresponding wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eSurfaceFullScreenExclusiveInfoEXT>
  {
    using Type = SurfaceFullScreenExclusiveInfoEXT;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  // wrapper struct for struct VkSurfaceFullScreenExclusiveWin32InfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceFullScreenExclusiveWin32InfoEXT.html
  struct SurfaceFullScreenExclusiveWin32InfoEXT
  {
    using NativeType = VkSurfaceFullScreenExclusiveWin32InfoEXT;

    // May appear at most once in a pNext chain; identified there by this sType value.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; hmonitor_ is the Win32 monitor handle targeted for full-screen exclusive mode.
    VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( HMONITOR hmonitor_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , hmonitor{ hmonitor_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native Vulkan struct; safe because wrapper and native struct are layout-compatible.
    SurfaceFullScreenExclusiveWin32InfoEXT( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SurfaceFullScreenExclusiveWin32InfoEXT( *reinterpret_cast<SurfaceFullScreenExclusiveWin32InfoEXT const *>( &rhs ) )
    {
    }

    SurfaceFullScreenExclusiveWin32InfoEXT & operator=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native Vulkan struct.
    SurfaceFullScreenExclusiveWin32InfoEXT & operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SurfaceFullScreenExclusiveWin32InfoEXT const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters (return *this so calls can be chained).
    VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveWin32InfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveWin32InfoEXT & setHmonitor( HMONITOR hmonitor_ ) VULKAN_HPP_NOEXCEPT
    {
      hmonitor = hmonitor_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan struct, by reference and by pointer.
    operator VkSurfaceFullScreenExclusiveWin32InfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfaceFullScreenExclusiveWin32InfoEXT *>( this );
    }

    operator VkSurfaceFullScreenExclusiveWin32InfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceFullScreenExclusiveWin32InfoEXT *>( this );
    }

    operator VkSurfaceFullScreenExclusiveWin32InfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSurfaceFullScreenExclusiveWin32InfoEXT *>( this );
    }

    operator VkSurfaceFullScreenExclusiveWin32InfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSurfaceFullScreenExclusiveWin32InfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references for generic member-wise processing.
    std::tuple<StructureType const &, const void * const &, HMONITOR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, hmonitor );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceFullScreenExclusiveWin32InfoEXT const & ) const = default;
#  else
    // Member-wise equality; pNext and hmonitor are compared as raw handles/pointers.
    bool operator==( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hmonitor == rhs.hmonitor );
#    endif
    }

    bool operator!=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // sType must keep this value for pNext-chain identification.
    StructureType sType    = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT;
    const void *  pNext    = {};
    HMONITOR      hmonitor = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan struct type to its C++ wrapper for template metaprogramming.
  template <>
  struct CppType<VkSurfaceFullScreenExclusiveWin32InfoEXT>
  {
    using Type = SurfaceFullScreenExclusiveWin32InfoEXT;
  };
#  endif

  // Maps the StructureType enum value to the corresponding C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT>
  {
    using Type = SurfaceFullScreenExclusiveWin32InfoEXT;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

  // wrapper struct for struct VkSurfacePresentModeCompatibilityKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfacePresentModeCompatibilityKHR.html
  struct SurfacePresentModeCompatibilityKHR
  {
    using NativeType = VkSurfacePresentModeCompatibilityKHR;

    // May appear at most once in a pNext chain; identified there by this sType value.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSurfacePresentModeCompatibilityKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pPresentModes_ points to an array of presentModeCount_ entries.
    VULKAN_HPP_CONSTEXPR
      SurfacePresentModeCompatibilityKHR( uint32_t presentModeCount_ = {}, PresentModeKHR * pPresentModes_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , presentModeCount{ presentModeCount_ }
      , pPresentModes{ pPresentModes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SurfacePresentModeCompatibilityKHR( SurfacePresentModeCompatibilityKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native Vulkan struct; safe because wrapper and native struct are layout-compatible.
    SurfacePresentModeCompatibilityKHR( VkSurfacePresentModeCompatibilityKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SurfacePresentModeCompatibilityKHR( *reinterpret_cast<SurfacePresentModeCompatibilityKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives count and pointer from a non-owning array proxy.
    // The caller must keep the referenced array alive while this struct is in use.
    SurfacePresentModeCompatibilityKHR( ArrayProxyNoTemporaries<PresentModeKHR> const & presentModes_, void * pNext_ = nullptr )
      : pNext( pNext_ ), presentModeCount( static_cast<uint32_t>( presentModes_.size() ) ), pPresentModes( presentModes_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SurfacePresentModeCompatibilityKHR & operator=( SurfacePresentModeCompatibilityKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native Vulkan struct.
    SurfacePresentModeCompatibilityKHR & operator=( VkSurfacePresentModeCompatibilityKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SurfacePresentModeCompatibilityKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters (return *this so calls can be chained).
    VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeCompatibilityKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeCompatibilityKHR & setPresentModeCount( uint32_t presentModeCount_ ) VULKAN_HPP_NOEXCEPT
    {
      presentModeCount = presentModeCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeCompatibilityKHR & setPPresentModes( PresentModeKHR * pPresentModes_ ) VULKAN_HPP_NOEXCEPT
    {
      pPresentModes = pPresentModes_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets count and pointer together from a non-owning array proxy.
    SurfacePresentModeCompatibilityKHR & setPresentModes( ArrayProxyNoTemporaries<PresentModeKHR> const & presentModes_ ) VULKAN_HPP_NOEXCEPT
    {
      presentModeCount = static_cast<uint32_t>( presentModes_.size() );
      pPresentModes    = presentModes_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan struct, by reference and by pointer.
    operator VkSurfacePresentModeCompatibilityKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfacePresentModeCompatibilityKHR *>( this );
    }

    operator VkSurfacePresentModeCompatibilityKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfacePresentModeCompatibilityKHR *>( this );
    }

    operator VkSurfacePresentModeCompatibilityKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSurfacePresentModeCompatibilityKHR *>( this );
    }

    operator VkSurfacePresentModeCompatibilityKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSurfacePresentModeCompatibilityKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references for generic member-wise processing.
    std::tuple<StructureType const &, void * const &, uint32_t const &, PresentModeKHR * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, presentModeCount, pPresentModes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfacePresentModeCompatibilityKHR const & ) const = default;
#else
    // Member-wise equality; pPresentModes is compared as a pointer, not by array contents.
    bool operator==( SurfacePresentModeCompatibilityKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentModeCount == rhs.presentModeCount ) && ( pPresentModes == rhs.pPresentModes );
#  endif
    }

    bool operator!=( SurfacePresentModeCompatibilityKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType must keep this value for pNext-chain identification.
    StructureType    sType            = StructureType::eSurfacePresentModeCompatibilityKHR;
    void *           pNext            = {};
    uint32_t         presentModeCount = {};
    PresentModeKHR * pPresentModes    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan struct type to its C++ wrapper for template metaprogramming.
  template <>
  struct CppType<VkSurfacePresentModeCompatibilityKHR>
  {
    using Type = SurfacePresentModeCompatibilityKHR;
  };
#endif

  // Maps the StructureType enum value to the corresponding C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eSurfacePresentModeCompatibilityKHR>
  {
    using Type = SurfacePresentModeCompatibilityKHR;
  };

  // Backward-compatible alias for the former EXT name of this struct.
  using SurfacePresentModeCompatibilityEXT = SurfacePresentModeCompatibilityKHR;

  // wrapper struct for struct VkSurfacePresentModeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfacePresentModeKHR.html
  struct SurfacePresentModeKHR
  {
    using NativeType = VkSurfacePresentModeKHR;

    // May appear at most once in a pNext chain; identified there by this sType value.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSurfacePresentModeKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor.
    VULKAN_HPP_CONSTEXPR SurfacePresentModeKHR( PresentModeKHR presentMode_ = PresentModeKHR::eImmediate, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , presentMode{ presentMode_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SurfacePresentModeKHR( SurfacePresentModeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native Vulkan struct; safe because wrapper and native struct are layout-compatible.
    SurfacePresentModeKHR( VkSurfacePresentModeKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SurfacePresentModeKHR( *reinterpret_cast<SurfacePresentModeKHR const *>( &rhs ) )
    {
    }

    SurfacePresentModeKHR & operator=( SurfacePresentModeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native Vulkan struct.
    SurfacePresentModeKHR & operator=( VkSurfacePresentModeKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SurfacePresentModeKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters (return *this so calls can be chained).
    VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeKHR & setPresentMode( PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT
    {
      presentMode = presentMode_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan struct, by reference and by pointer.
    operator VkSurfacePresentModeKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfacePresentModeKHR *>( this );
    }

    operator VkSurfacePresentModeKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfacePresentModeKHR *>( this );
    }

    operator VkSurfacePresentModeKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSurfacePresentModeKHR *>( this );
    }

    operator VkSurfacePresentModeKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSurfacePresentModeKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references for generic member-wise processing.
    std::tuple<StructureType const &, void * const &, PresentModeKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, presentMode );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfacePresentModeKHR const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( SurfacePresentModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentMode == rhs.presentMode );
#  endif
    }

    bool operator!=( SurfacePresentModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType must keep this value for pNext-chain identification.
    StructureType  sType       = StructureType::eSurfacePresentModeKHR;
    void *         pNext       = {};
    PresentModeKHR presentMode = PresentModeKHR::eImmediate;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan struct type to its C++ wrapper for template metaprogramming.
  template <>
  struct CppType<VkSurfacePresentModeKHR>
  {
    using Type = SurfacePresentModeKHR;
  };
#endif

  // Maps the StructureType enum value to the corresponding C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eSurfacePresentModeKHR>
  {
    using Type = SurfacePresentModeKHR;
  };

  // Backward-compatible alias for the former EXT name of this struct.
  using SurfacePresentModeEXT = SurfacePresentModeKHR;

  // wrapper struct for struct VkSurfacePresentScalingCapabilitiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfacePresentScalingCapabilitiesKHR.html
  struct SurfacePresentScalingCapabilitiesKHR
  {
    using NativeType = VkSurfacePresentScalingCapabilitiesKHR;

    // May appear at most once in a pNext chain; identified there by this sType value.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSurfacePresentScalingCapabilitiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor.
    VULKAN_HPP_CONSTEXPR SurfacePresentScalingCapabilitiesKHR( PresentScalingFlagsKHR supportedPresentScaling_  = {},
                                                               PresentGravityFlagsKHR supportedPresentGravityX_ = {},
                                                               PresentGravityFlagsKHR supportedPresentGravityY_ = {},
                                                               Extent2D               minScaledImageExtent_     = {},
                                                               Extent2D               maxScaledImageExtent_     = {},
                                                               void *                 pNext_                    = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , supportedPresentScaling{ supportedPresentScaling_ }
      , supportedPresentGravityX{ supportedPresentGravityX_ }
      , supportedPresentGravityY{ supportedPresentGravityY_ }
      , minScaledImageExtent{ minScaledImageExtent_ }
      , maxScaledImageExtent{ maxScaledImageExtent_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SurfacePresentScalingCapabilitiesKHR( SurfacePresentScalingCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native Vulkan struct; safe because wrapper and native struct are layout-compatible.
    SurfacePresentScalingCapabilitiesKHR( VkSurfacePresentScalingCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SurfacePresentScalingCapabilitiesKHR( *reinterpret_cast<SurfacePresentScalingCapabilitiesKHR const *>( &rhs ) )
    {
    }

    SurfacePresentScalingCapabilitiesKHR & operator=( SurfacePresentScalingCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native Vulkan struct.
    SurfacePresentScalingCapabilitiesKHR & operator=( VkSurfacePresentScalingCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SurfacePresentScalingCapabilitiesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters (return *this so calls can be chained).
    VULKAN_HPP_CONSTEXPR_14 SurfacePresentScalingCapabilitiesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SurfacePresentScalingCapabilitiesKHR &
      setSupportedPresentScaling( PresentScalingFlagsKHR supportedPresentScaling_ ) VULKAN_HPP_NOEXCEPT
    {
      supportedPresentScaling = supportedPresentScaling_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SurfacePresentScalingCapabilitiesKHR &
      setSupportedPresentGravityX( PresentGravityFlagsKHR supportedPresentGravityX_ ) VULKAN_HPP_NOEXCEPT
    {
      supportedPresentGravityX = supportedPresentGravityX_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SurfacePresentScalingCapabilitiesKHR &
      setSupportedPresentGravityY( PresentGravityFlagsKHR supportedPresentGravityY_ ) VULKAN_HPP_NOEXCEPT
    {
      supportedPresentGravityY = supportedPresentGravityY_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SurfacePresentScalingCapabilitiesKHR & setMinScaledImageExtent( Extent2D const & minScaledImageExtent_ ) VULKAN_HPP_NOEXCEPT
    {
      minScaledImageExtent = minScaledImageExtent_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SurfacePresentScalingCapabilitiesKHR & setMaxScaledImageExtent( Extent2D const & maxScaledImageExtent_ ) VULKAN_HPP_NOEXCEPT
    {
      maxScaledImageExtent = maxScaledImageExtent_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan struct, by reference and by pointer.
    operator VkSurfacePresentScalingCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfacePresentScalingCapabilitiesKHR *>( this );
    }

    operator VkSurfacePresentScalingCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfacePresentScalingCapabilitiesKHR *>( this );
    }

    operator VkSurfacePresentScalingCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSurfacePresentScalingCapabilitiesKHR *>( this );
    }

    operator VkSurfacePresentScalingCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSurfacePresentScalingCapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references for generic member-wise processing.
    std::tuple<StructureType const &,
               void * const &,
               PresentScalingFlagsKHR const &,
               PresentGravityFlagsKHR const &,
               PresentGravityFlagsKHR const &,
               Extent2D const &,
               Extent2D const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, supportedPresentScaling, supportedPresentGravityX, supportedPresentGravityY, minScaledImageExtent, maxScaledImageExtent );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfacePresentScalingCapabilitiesKHR const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( SurfacePresentScalingCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedPresentScaling == rhs.supportedPresentScaling ) &&
             ( supportedPresentGravityX == rhs.supportedPresentGravityX ) && ( supportedPresentGravityY == rhs.supportedPresentGravityY ) &&
             ( minScaledImageExtent == rhs.minScaledImageExtent ) && ( maxScaledImageExtent == rhs.maxScaledImageExtent );
#  endif
    }

    bool operator!=( SurfacePresentScalingCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType must keep this value for pNext-chain identification.
    StructureType          sType                    = StructureType::eSurfacePresentScalingCapabilitiesKHR;
    void *                 pNext                    = {};
    PresentScalingFlagsKHR supportedPresentScaling  = {};
    PresentGravityFlagsKHR supportedPresentGravityX = {};
    PresentGravityFlagsKHR supportedPresentGravityY = {};
    Extent2D               minScaledImageExtent     = {};
    Extent2D               maxScaledImageExtent     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan struct type to its C++ wrapper for template metaprogramming.
  template <>
  struct CppType<VkSurfacePresentScalingCapabilitiesKHR>
  {
    using Type = SurfacePresentScalingCapabilitiesKHR;
  };
#endif

  // Maps the StructureType enum value to the corresponding C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eSurfacePresentScalingCapabilitiesKHR>
  {
    using Type = SurfacePresentScalingCapabilitiesKHR;
  };

  // Backward-compatible alias for the former EXT name of this struct.
  using SurfacePresentScalingCapabilitiesEXT = SurfacePresentScalingCapabilitiesKHR;

  // wrapper struct for struct VkSurfaceProtectedCapabilitiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceProtectedCapabilitiesKHR.html
  struct SurfaceProtectedCapabilitiesKHR
  {
    using NativeType = VkSurfaceProtectedCapabilitiesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSurfaceProtectedCapabilitiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( Bool32 supportsProtected_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , supportsProtected{ supportsProtected_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SurfaceProtectedCapabilitiesKHR( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SurfaceProtectedCapabilitiesKHR( *reinterpret_cast<SurfaceProtectedCapabilitiesKHR const *>( &rhs ) )
    {
    }

    SurfaceProtectedCapabilitiesKHR & operator=( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    SurfaceProtectedCapabilitiesKHR & operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SurfaceProtectedCapabilitiesKHR const *>( &rhs );
      return *this;
    }

    operator VkSurfaceProtectedCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfaceProtectedCapabilitiesKHR *>( this );
    }

    operator VkSurfaceProtectedCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceProtectedCapabilitiesKHR *>( this );
    }

    operator VkSurfaceProtectedCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSurfaceProtectedCapabilitiesKHR *>( this );
    }

    operator VkSurfaceProtectedCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSurfaceProtectedCapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, supportsProtected );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceProtectedCapabilitiesKHR const & ) const = default;
#else
    bool operator==( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportsProtected == rhs.supportsProtected );
#  endif
    }

    bool operator!=( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType             = StructureType::eSurfaceProtectedCapabilitiesKHR;
    void *        pNext             = {};
    Bool32        supportsProtected = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan struct type to its C++ wrapper for template metaprogramming.
  template <>
  struct CppType<VkSurfaceProtectedCapabilitiesKHR>
  {
    using Type = SurfaceProtectedCapabilitiesKHR;
  };
#endif

  // Maps the StructureType enum value to the corresponding C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eSurfaceProtectedCapabilitiesKHR>
  {
    using Type = SurfaceProtectedCapabilitiesKHR;
  };

  // wrapper struct for struct VkSwapchainCalibratedTimestampInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainCalibratedTimestampInfoEXT.html
  struct SwapchainCalibratedTimestampInfoEXT
  {
    using NativeType = VkSwapchainCalibratedTimestampInfoEXT;

    // May appear at most once in a pNext chain; identified there by this sType value.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSwapchainCalibratedTimestampInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor.
    VULKAN_HPP_CONSTEXPR SwapchainCalibratedTimestampInfoEXT( SwapchainKHR         swapchain_    = {},
                                                              PresentStageFlagsEXT presentStage_ = {},
                                                              uint64_t             timeDomainId_ = {},
                                                              const void *         pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , swapchain{ swapchain_ }
      , presentStage{ presentStage_ }
      , timeDomainId{ timeDomainId_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SwapchainCalibratedTimestampInfoEXT( SwapchainCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native Vulkan struct; safe because wrapper and native struct are layout-compatible.
    SwapchainCalibratedTimestampInfoEXT( VkSwapchainCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SwapchainCalibratedTimestampInfoEXT( *reinterpret_cast<SwapchainCalibratedTimestampInfoEXT const *>( &rhs ) )
    {
    }

    SwapchainCalibratedTimestampInfoEXT & operator=( SwapchainCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native Vulkan struct.
    SwapchainCalibratedTimestampInfoEXT & operator=( VkSwapchainCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SwapchainCalibratedTimestampInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters (return *this so calls can be chained).
    VULKAN_HPP_CONSTEXPR_14 SwapchainCalibratedTimestampInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainCalibratedTimestampInfoEXT & setSwapchain( SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchain = swapchain_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainCalibratedTimestampInfoEXT & setPresentStage( PresentStageFlagsEXT presentStage_ ) VULKAN_HPP_NOEXCEPT
    {
      presentStage = presentStage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainCalibratedTimestampInfoEXT & setTimeDomainId( uint64_t timeDomainId_ ) VULKAN_HPP_NOEXCEPT
    {
      timeDomainId = timeDomainId_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan struct, by reference and by pointer.
    operator VkSwapchainCalibratedTimestampInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSwapchainCalibratedTimestampInfoEXT *>( this );
    }

    operator VkSwapchainCalibratedTimestampInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSwapchainCalibratedTimestampInfoEXT *>( this );
    }

    operator VkSwapchainCalibratedTimestampInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSwapchainCalibratedTimestampInfoEXT *>( this );
    }

    operator VkSwapchainCalibratedTimestampInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSwapchainCalibratedTimestampInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references for generic member-wise processing.
    std::tuple<StructureType const &, const void * const &, SwapchainKHR const &, PresentStageFlagsEXT const &, uint64_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, swapchain, presentStage, timeDomainId );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SwapchainCalibratedTimestampInfoEXT const & ) const = default;
#else
    // Member-wise equality; pNext and swapchain are compared as handles/pointers.
    bool operator==( SwapchainCalibratedTimestampInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && ( presentStage == rhs.presentStage ) &&
             ( timeDomainId == rhs.timeDomainId );
#  endif
    }

    bool operator!=( SwapchainCalibratedTimestampInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType must keep this value for pNext-chain identification.
    StructureType        sType        = StructureType::eSwapchainCalibratedTimestampInfoEXT;
    const void *         pNext        = {};
    SwapchainKHR         swapchain    = {};
    PresentStageFlagsEXT presentStage = {};
    uint64_t             timeDomainId = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native Vulkan struct type to its C++ wrapper for template metaprogramming.
  template <>
  struct CppType<VkSwapchainCalibratedTimestampInfoEXT>
  {
    using Type = SwapchainCalibratedTimestampInfoEXT;
  };
#endif

  // Maps the StructureType enum value to the corresponding C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eSwapchainCalibratedTimestampInfoEXT>
  {
    using Type = SwapchainCalibratedTimestampInfoEXT;
  };

  // wrapper struct for struct VkSwapchainCounterCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainCounterCreateInfoEXT.html
  struct SwapchainCounterCreateInfoEXT
  {
    using NativeType = VkSwapchainCounterCreateInfoEXT;

    // May appear at most once in a pNext chain; identified there by this sType value.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSwapchainCounterCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor.
    VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( SurfaceCounterFlagsEXT surfaceCounters_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , surfaceCounters{ surfaceCounters_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native Vulkan struct; safe because wrapper and native struct are layout-compatible.
    SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SwapchainCounterCreateInfoEXT( *reinterpret_cast<SwapchainCounterCreateInfoEXT const *>( &rhs ) )
    {
    }

    SwapchainCounterCreateInfoEXT & operator=( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native Vulkan struct.
    SwapchainCounterCreateInfoEXT & operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SwapchainCounterCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters (return *this so calls can be chained).
    VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT & setSurfaceCounters( SurfaceCounterFlagsEXT surfaceCounters_ ) VULKAN_HPP_NOEXCEPT
    {
      surfaceCounters = surfaceCounters_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vulkan struct, by reference and by pointer.
    operator VkSwapchainCounterCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSwapchainCounterCreateInfoEXT *>( this );
    }

    operator VkSwapchainCounterCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSwapchainCounterCreateInfoEXT *>( this );
    }

    operator VkSwapchainCounterCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSwapchainCounterCreateInfoEXT *>( this );
    }

    operator VkSwapchainCounterCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSwapchainCounterCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of references for generic member-wise processing.
    std::tuple<StructureType const &, const void * const &, SurfaceCounterFlagsEXT const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, surfaceCounters );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SwapchainCounterCreateInfoEXT const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceCounters == rhs.surfaceCounters );
#  endif
    }

    bool operator!=( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType must keep this value for pNext-chain identification.
    StructureType          sType           = StructureType::eSwapchainCounterCreateInfoEXT;
    const void *           pNext           = {};
    SurfaceCounterFlagsEXT surfaceCounters = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Compile-time mapping from the native VkSwapchainCounterCreateInfoEXT type to its C++ wrapper.
  template <>
  struct CppType<VkSwapchainCounterCreateInfoEXT>
  {
    using Type = SwapchainCounterCreateInfoEXT;
  };
#endif

  // Compile-time mapping from the StructureType enum value to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::eSwapchainCounterCreateInfoEXT>
  {
    using Type = SwapchainCounterCreateInfoEXT;
  };

  // wrapper struct for struct VkSwapchainCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainCreateInfoKHR.html
  // NOTE: member order, types, and defaults mirror the native struct exactly; the conversion operators below
  // reinterpret_cast between the two, which is only valid while the layouts stay identical.
  struct SwapchainCreateInfoKHR
  {
    using NativeType = VkSwapchainCreateInfoKHR;

    // allowDuplicate: presumably whether this sType may occur more than once in a structure chain — semantics live
    // in the StructureChain machinery outside this file.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSwapchainCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: accepts every member except the fixed sType, in declaration order; pNext_ comes last so
    // it can usually be left at its nullptr default.
    VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( SwapchainCreateFlagsKHR     flags_                 = {},
                                                 SurfaceKHR                  surface_               = {},
                                                 uint32_t                    minImageCount_         = {},
                                                 Format                      imageFormat_           = Format::eUndefined,
                                                 ColorSpaceKHR               imageColorSpace_       = ColorSpaceKHR::eSrgbNonlinear,
                                                 Extent2D                    imageExtent_           = {},
                                                 uint32_t                    imageArrayLayers_      = {},
                                                 ImageUsageFlags             imageUsage_            = {},
                                                 SharingMode                 imageSharingMode_      = SharingMode::eExclusive,
                                                 uint32_t                    queueFamilyIndexCount_ = {},
                                                 const uint32_t *            pQueueFamilyIndices_   = {},
                                                 SurfaceTransformFlagBitsKHR preTransform_          = SurfaceTransformFlagBitsKHR::eIdentity,
                                                 CompositeAlphaFlagBitsKHR   compositeAlpha_        = CompositeAlphaFlagBitsKHR::eOpaque,
                                                 PresentModeKHR              presentMode_           = PresentModeKHR::eImmediate,
                                                 Bool32                      clipped_               = {},
                                                 SwapchainKHR                oldSwapchain_          = {},
                                                 const void *                pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , surface{ surface_ }
      , minImageCount{ minImageCount_ }
      , imageFormat{ imageFormat_ }
      , imageColorSpace{ imageColorSpace_ }
      , imageExtent{ imageExtent_ }
      , imageArrayLayers{ imageArrayLayers_ }
      , imageUsage{ imageUsage_ }
      , imageSharingMode{ imageSharingMode_ }
      , queueFamilyIndexCount{ queueFamilyIndexCount_ }
      , pQueueFamilyIndices{ pQueueFamilyIndices_ }
      , preTransform{ preTransform_ }
      , compositeAlpha{ compositeAlpha_ }
      , presentMode{ presentMode_ }
      , clipped{ clipped_ }
      , oldSwapchain{ oldSwapchain_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpreting it as the wrapper (relies on identical layout).
    SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SwapchainCreateInfoKHR( *reinterpret_cast<SwapchainCreateInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode convenience constructor: derives queueFamilyIndexCount and pQueueFamilyIndices from a single
    // ArrayProxy argument instead of a separate count/pointer pair.
    SwapchainCreateInfoKHR( SwapchainCreateFlagsKHR                         flags_,
                            SurfaceKHR                                      surface_,
                            uint32_t                                        minImageCount_,
                            Format                                          imageFormat_,
                            ColorSpaceKHR                                   imageColorSpace_,
                            Extent2D                                        imageExtent_,
                            uint32_t                                        imageArrayLayers_,
                            ImageUsageFlags                                 imageUsage_,
                            SharingMode                                     imageSharingMode_,
                            ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_,
                            SurfaceTransformFlagBitsKHR                     preTransform_   = SurfaceTransformFlagBitsKHR::eIdentity,
                            CompositeAlphaFlagBitsKHR                       compositeAlpha_ = CompositeAlphaFlagBitsKHR::eOpaque,
                            PresentModeKHR                                  presentMode_    = PresentModeKHR::eImmediate,
                            Bool32                                          clipped_        = {},
                            SwapchainKHR                                    oldSwapchain_   = {},
                            const void *                                    pNext_          = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , surface( surface_ )
      , minImageCount( minImageCount_ )
      , imageFormat( imageFormat_ )
      , imageColorSpace( imageColorSpace_ )
      , imageExtent( imageExtent_ )
      , imageArrayLayers( imageArrayLayers_ )
      , imageUsage( imageUsage_ )
      , imageSharingMode( imageSharingMode_ )
      , queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) )
      , pQueueFamilyIndices( queueFamilyIndices_.data() )
      , preTransform( preTransform_ )
      , compositeAlpha( compositeAlpha_ )
      , presentMode( presentMode_ )
      , clipped( clipped_ )
      , oldSwapchain( oldSwapchain_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SwapchainCreateInfoKHR & operator=( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; valid because the two types share the same layout.
    SwapchainCreateInfoKHR & operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SwapchainCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setFlags( SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setSurface( SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
    {
      surface = surface_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setMinImageCount( uint32_t minImageCount_ ) VULKAN_HPP_NOEXCEPT
    {
      minImageCount = minImageCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageFormat( Format imageFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      imageFormat = imageFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageColorSpace( ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT
    {
      imageColorSpace = imageColorSpace_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageExtent( Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
    {
      imageExtent = imageExtent_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageArrayLayers( uint32_t imageArrayLayers_ ) VULKAN_HPP_NOEXCEPT
    {
      imageArrayLayers = imageArrayLayers_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageUsage( ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT
    {
      imageUsage = imageUsage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageSharingMode( SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT
    {
      imageSharingMode = imageSharingMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndexCount = queueFamilyIndexCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pQueueFamilyIndices = pQueueFamilyIndices_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: updates both the count and the pointer from a single ArrayProxy, keeping them in sync.
    SwapchainCreateInfoKHR & setQueueFamilyIndices( ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
      pQueueFamilyIndices   = queueFamilyIndices_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPreTransform( SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT
    {
      preTransform = preTransform_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setCompositeAlpha( CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT
    {
      compositeAlpha = compositeAlpha_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPresentMode( PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT
    {
      presentMode = presentMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setClipped( Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT
    {
      clipped = clipped_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setOldSwapchain( SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT
    {
      oldSwapchain = oldSwapchain_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (reference and pointer, const and non-const) so the wrapper can be
    // passed directly to the C API; all rely on the layout compatibility noted at the top of the struct.
    operator VkSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSwapchainCreateInfoKHR *>( this );
    }

    operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSwapchainCreateInfoKHR *>( this );
    }

    operator VkSwapchainCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSwapchainCreateInfoKHR *>( this );
    }

    operator VkSwapchainCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSwapchainCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members (including sType) as a tuple of const references for generic, reflection-style code.
    std::tuple<StructureType const &,
               const void * const &,
               SwapchainCreateFlagsKHR const &,
               SurfaceKHR const &,
               uint32_t const &,
               Format const &,
               ColorSpaceKHR const &,
               Extent2D const &,
               uint32_t const &,
               ImageUsageFlags const &,
               SharingMode const &,
               uint32_t const &,
               const uint32_t * const &,
               SurfaceTransformFlagBitsKHR const &,
               CompositeAlphaFlagBitsKHR const &,
               PresentModeKHR const &,
               Bool32 const &,
               SwapchainKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       flags,
                       surface,
                       minImageCount,
                       imageFormat,
                       imageColorSpace,
                       imageExtent,
                       imageArrayLayers,
                       imageUsage,
                       imageSharingMode,
                       queueFamilyIndexCount,
                       pQueueFamilyIndices,
                       preTransform,
                       compositeAlpha,
                       presentMode,
                       clipped,
                       oldSwapchain );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SwapchainCreateInfoKHR const & ) const = default;
#else
    // Memberwise equality; note that pNext and pQueueFamilyIndices are compared as raw pointers, not deeply.
    bool operator==( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( surface == rhs.surface ) &&
             ( minImageCount == rhs.minImageCount ) && ( imageFormat == rhs.imageFormat ) && ( imageColorSpace == rhs.imageColorSpace ) &&
             ( imageExtent == rhs.imageExtent ) && ( imageArrayLayers == rhs.imageArrayLayers ) && ( imageUsage == rhs.imageUsage ) &&
             ( imageSharingMode == rhs.imageSharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) &&
             ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) && ( preTransform == rhs.preTransform ) && ( compositeAlpha == rhs.compositeAlpha ) &&
             ( presentMode == rhs.presentMode ) && ( clipped == rhs.clipped ) && ( oldSwapchain == rhs.oldSwapchain );
#  endif
    }

    bool operator!=( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members in native declaration order; sType is fixed and must remain the first member.
    StructureType               sType                 = StructureType::eSwapchainCreateInfoKHR;
    const void *                pNext                 = {};
    SwapchainCreateFlagsKHR     flags                 = {};
    SurfaceKHR                  surface               = {};
    uint32_t                    minImageCount         = {};
    Format                      imageFormat           = Format::eUndefined;
    ColorSpaceKHR               imageColorSpace       = ColorSpaceKHR::eSrgbNonlinear;
    Extent2D                    imageExtent           = {};
    uint32_t                    imageArrayLayers      = {};
    ImageUsageFlags             imageUsage            = {};
    SharingMode                 imageSharingMode      = SharingMode::eExclusive;
    uint32_t                    queueFamilyIndexCount = {};
    const uint32_t *            pQueueFamilyIndices   = {};
    SurfaceTransformFlagBitsKHR preTransform          = SurfaceTransformFlagBitsKHR::eIdentity;
    CompositeAlphaFlagBitsKHR   compositeAlpha        = CompositeAlphaFlagBitsKHR::eOpaque;
    PresentModeKHR              presentMode           = PresentModeKHR::eImmediate;
    Bool32                      clipped               = {};
    SwapchainKHR                oldSwapchain          = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Compile-time mapping from the native VkSwapchainCreateInfoKHR type to its C++ wrapper.
  template <>
  struct CppType<VkSwapchainCreateInfoKHR>
  {
    using Type = SwapchainCreateInfoKHR;
  };
#endif

  // Compile-time mapping from the StructureType enum value to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::eSwapchainCreateInfoKHR>
  {
    using Type = SwapchainCreateInfoKHR;
  };

  // wrapper struct for struct VkSwapchainDisplayNativeHdrCreateInfoAMD, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainDisplayNativeHdrCreateInfoAMD.html
  // Layout mirrors the native struct exactly; the conversion operators below rely on this via reinterpret_cast.
  struct SwapchainDisplayNativeHdrCreateInfoAMD
  {
    using NativeType = VkSwapchainDisplayNativeHdrCreateInfoAMD;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed, pNext_ comes last so it can default to nullptr.
    VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( Bool32 localDimmingEnable_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , localDimmingEnable{ localDimmingEnable_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpreting it as the wrapper (layouts match).
    SwapchainDisplayNativeHdrCreateInfoAMD( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : SwapchainDisplayNativeHdrCreateInfoAMD( *reinterpret_cast<SwapchainDisplayNativeHdrCreateInfoAMD const *>( &rhs ) )
    {
    }

    SwapchainDisplayNativeHdrCreateInfoAMD & operator=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; valid because the two types share the same layout.
    SwapchainDisplayNativeHdrCreateInfoAMD & operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SwapchainDisplayNativeHdrCreateInfoAMD const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this for fluent composition.
    VULKAN_HPP_CONSTEXPR_14 SwapchainDisplayNativeHdrCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainDisplayNativeHdrCreateInfoAMD & setLocalDimmingEnable( Bool32 localDimmingEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      localDimmingEnable = localDimmingEnable_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type so the wrapper can be passed straight to the C API.
    operator VkSwapchainDisplayNativeHdrCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSwapchainDisplayNativeHdrCreateInfoAMD *>( this );
    }

    operator VkSwapchainDisplayNativeHdrCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSwapchainDisplayNativeHdrCreateInfoAMD *>( this );
    }

    operator VkSwapchainDisplayNativeHdrCreateInfoAMD const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSwapchainDisplayNativeHdrCreateInfoAMD *>( this );
    }

    operator VkSwapchainDisplayNativeHdrCreateInfoAMD *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSwapchainDisplayNativeHdrCreateInfoAMD *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members (including sType) as a tuple of const references for generic code.
    std::tuple<StructureType const &, const void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, localDimmingEnable );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SwapchainDisplayNativeHdrCreateInfoAMD const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( localDimmingEnable == rhs.localDimmingEnable );
#  endif
    }

    bool operator!=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members in native declaration order; sType is fixed and must remain the first member.
    StructureType sType              = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD;
    const void *  pNext              = {};
    Bool32        localDimmingEnable = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Compile-time mapping from the native VkSwapchainDisplayNativeHdrCreateInfoAMD type to its C++ wrapper.
  template <>
  struct CppType<VkSwapchainDisplayNativeHdrCreateInfoAMD>
  {
    using Type = SwapchainDisplayNativeHdrCreateInfoAMD;
  };
#endif

  // Compile-time mapping from the StructureType enum value to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD>
  {
    using Type = SwapchainDisplayNativeHdrCreateInfoAMD;
  };

#if defined( VK_USE_PLATFORM_OHOS )
  // wrapper struct for struct VkSwapchainImageCreateInfoOHOS, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainImageCreateInfoOHOS.html
  // Only available when building for OpenHarmony (VK_USE_PLATFORM_OHOS).
  // Layout mirrors the native struct exactly; the conversion operators below rely on this via reinterpret_cast.
  struct SwapchainImageCreateInfoOHOS
  {
    using NativeType = VkSwapchainImageCreateInfoOHOS;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSwapchainImageCreateInfoOHOS;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed, pNext_ comes last so it can default to nullptr.
    VULKAN_HPP_CONSTEXPR SwapchainImageCreateInfoOHOS( SwapchainImageUsageFlagsOHOS usage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , usage{ usage_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SwapchainImageCreateInfoOHOS( SwapchainImageCreateInfoOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpreting it as the wrapper (layouts match).
    SwapchainImageCreateInfoOHOS( VkSwapchainImageCreateInfoOHOS const & rhs ) VULKAN_HPP_NOEXCEPT
      : SwapchainImageCreateInfoOHOS( *reinterpret_cast<SwapchainImageCreateInfoOHOS const *>( &rhs ) )
    {
    }

    SwapchainImageCreateInfoOHOS & operator=( SwapchainImageCreateInfoOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; valid because the two types share the same layout.
    SwapchainImageCreateInfoOHOS & operator=( VkSwapchainImageCreateInfoOHOS const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SwapchainImageCreateInfoOHOS const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this for fluent composition.
    VULKAN_HPP_CONSTEXPR_14 SwapchainImageCreateInfoOHOS & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainImageCreateInfoOHOS & setUsage( SwapchainImageUsageFlagsOHOS usage_ ) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type so the wrapper can be passed straight to the C API.
    operator VkSwapchainImageCreateInfoOHOS const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSwapchainImageCreateInfoOHOS *>( this );
    }

    operator VkSwapchainImageCreateInfoOHOS &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSwapchainImageCreateInfoOHOS *>( this );
    }

    operator VkSwapchainImageCreateInfoOHOS const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSwapchainImageCreateInfoOHOS *>( this );
    }

    operator VkSwapchainImageCreateInfoOHOS *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSwapchainImageCreateInfoOHOS *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members (including sType) as a tuple of const references for generic code.
    std::tuple<StructureType const &, const void * const &, SwapchainImageUsageFlagsOHOS const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, usage );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SwapchainImageCreateInfoOHOS const & ) const = default;
#  else
    // Memberwise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( SwapchainImageCreateInfoOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage );
#    endif
    }

    bool operator!=( SwapchainImageCreateInfoOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members in native declaration order; sType is fixed and must remain the first member.
    StructureType                sType = StructureType::eSwapchainImageCreateInfoOHOS;
    const void *                 pNext = {};
    SwapchainImageUsageFlagsOHOS usage = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Compile-time mapping from the native VkSwapchainImageCreateInfoOHOS type to its C++ wrapper.
  template <>
  struct CppType<VkSwapchainImageCreateInfoOHOS>
  {
    using Type = SwapchainImageCreateInfoOHOS;
  };
#  endif

  // Compile-time mapping from the StructureType enum value to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::eSwapchainImageCreateInfoOHOS>
  {
    using Type = SwapchainImageCreateInfoOHOS;
  };
#endif /*VK_USE_PLATFORM_OHOS*/

  // wrapper struct for struct VkSwapchainLatencyCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainLatencyCreateInfoNV.html
  // Layout mirrors the native struct exactly; the conversion operators below rely on this via reinterpret_cast.
  struct SwapchainLatencyCreateInfoNV
  {
    using NativeType = VkSwapchainLatencyCreateInfoNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSwapchainLatencyCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed, pNext_ comes last so it can default to nullptr.
    VULKAN_HPP_CONSTEXPR SwapchainLatencyCreateInfoNV( Bool32 latencyModeEnable_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , latencyModeEnable{ latencyModeEnable_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SwapchainLatencyCreateInfoNV( SwapchainLatencyCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpreting it as the wrapper (layouts match).
    SwapchainLatencyCreateInfoNV( VkSwapchainLatencyCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : SwapchainLatencyCreateInfoNV( *reinterpret_cast<SwapchainLatencyCreateInfoNV const *>( &rhs ) )
    {
    }

    SwapchainLatencyCreateInfoNV & operator=( SwapchainLatencyCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; valid because the two types share the same layout.
    SwapchainLatencyCreateInfoNV & operator=( VkSwapchainLatencyCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SwapchainLatencyCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this for fluent composition.
    VULKAN_HPP_CONSTEXPR_14 SwapchainLatencyCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainLatencyCreateInfoNV & setLatencyModeEnable( Bool32 latencyModeEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      latencyModeEnable = latencyModeEnable_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type so the wrapper can be passed straight to the C API.
    operator VkSwapchainLatencyCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSwapchainLatencyCreateInfoNV *>( this );
    }

    operator VkSwapchainLatencyCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSwapchainLatencyCreateInfoNV *>( this );
    }

    operator VkSwapchainLatencyCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSwapchainLatencyCreateInfoNV *>( this );
    }

    operator VkSwapchainLatencyCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSwapchainLatencyCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members (including sType) as a tuple of const references for generic code.
    std::tuple<StructureType const &, const void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, latencyModeEnable );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SwapchainLatencyCreateInfoNV const & ) const = default;
#else
    // Memberwise equality; pNext is compared as a raw pointer, not deeply.
    bool operator==( SwapchainLatencyCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( latencyModeEnable == rhs.latencyModeEnable );
#  endif
    }

    bool operator!=( SwapchainLatencyCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members in native declaration order; sType is fixed and must remain the first member.
    StructureType sType             = StructureType::eSwapchainLatencyCreateInfoNV;
    const void *  pNext             = {};
    Bool32        latencyModeEnable = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Compile-time mapping from the native VkSwapchainLatencyCreateInfoNV type to its C++ wrapper.
  template <>
  struct CppType<VkSwapchainLatencyCreateInfoNV>
  {
    using Type = SwapchainLatencyCreateInfoNV;
  };
#endif

  // Compile-time mapping from the StructureType enum value to the wrapper struct carrying that sType.
  template <>
  struct CppType<StructureType, StructureType::eSwapchainLatencyCreateInfoNV>
  {
    using Type = SwapchainLatencyCreateInfoNV;
  };

  // wrapper struct for struct VkSwapchainPresentBarrierCreateInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainPresentBarrierCreateInfoNV.html
  struct SwapchainPresentBarrierCreateInfoNV
  {
    using NativeType = VkSwapchainPresentBarrierCreateInfoNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSwapchainPresentBarrierCreateInfoNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SwapchainPresentBarrierCreateInfoNV( Bool32 presentBarrierEnable_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , presentBarrierEnable{ presentBarrierEnable_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SwapchainPresentBarrierCreateInfoNV( SwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SwapchainPresentBarrierCreateInfoNV( VkSwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : SwapchainPresentBarrierCreateInfoNV( *reinterpret_cast<SwapchainPresentBarrierCreateInfoNV const *>( &rhs ) )
    {
    }

    SwapchainPresentBarrierCreateInfoNV & operator=( SwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    SwapchainPresentBarrierCreateInfoNV & operator=( VkSwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SwapchainPresentBarrierCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentBarrierCreateInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentBarrierCreateInfoNV & setPresentBarrierEnable( Bool32 presentBarrierEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      presentBarrierEnable = presentBarrierEnable_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkSwapchainPresentBarrierCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSwapchainPresentBarrierCreateInfoNV *>( this );
    }

    operator VkSwapchainPresentBarrierCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSwapchainPresentBarrierCreateInfoNV *>( this );
    }

    operator VkSwapchainPresentBarrierCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSwapchainPresentBarrierCreateInfoNV *>( this );
    }

    operator VkSwapchainPresentBarrierCreateInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSwapchainPresentBarrierCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, presentBarrierEnable );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // C++20: defaulted three-way comparison derives ==, !=, <, ... memberwise.
    auto operator<=>( SwapchainPresentBarrierCreateInfoNV const & ) const = default;
#else
    // Pre-C++20 fallback: memberwise equality. Note pNext is compared by pointer value,
    // not by the contents of the chained structure.
    bool operator==( SwapchainPresentBarrierCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentBarrierEnable == rhs.presentBarrierEnable );
#  endif
    }

    bool operator!=( SwapchainPresentBarrierCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkSwapchainPresentBarrierCreateInfoNV exactly (order and types);
    // the reinterpret_cast conversions above depend on this layout.
    StructureType sType                = StructureType::eSwapchainPresentBarrierCreateInfoNV;
    void *        pNext                = {};
    Bool32        presentBarrierEnable = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C type to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkSwapchainPresentBarrierCreateInfoNV>
  {
    using Type = SwapchainPresentBarrierCreateInfoNV;
  };
#endif

  // Trait mapping the StructureType enumerant to the wrapper type, e.g. for
  // sType-driven structure-chain lookups.
  template <>
  struct CppType<StructureType, StructureType::eSwapchainPresentBarrierCreateInfoNV>
  {
    using Type = SwapchainPresentBarrierCreateInfoNV;
  };

  // wrapper struct for struct VkSwapchainPresentFenceInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainPresentFenceInfoKHR.html
  struct SwapchainPresentFenceInfoKHR
  {
    using NativeType = VkSwapchainPresentFenceInfoKHR;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The sType value the Vulkan API expects for this structure.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSwapchainPresentFenceInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Memberwise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR
      SwapchainPresentFenceInfoKHR( uint32_t swapchainCount_ = {}, const Fence * pFences_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , swapchainCount{ swapchainCount_ }
      , pFences{ pFences_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SwapchainPresentFenceInfoKHR( SwapchainPresentFenceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on identical layout of wrapper and C type.
    SwapchainPresentFenceInfoKHR( VkSwapchainPresentFenceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SwapchainPresentFenceInfoKHR( *reinterpret_cast<SwapchainPresentFenceInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode convenience: derives swapchainCount and pFences from one array.
    // The caller must keep the array alive for the lifetime of this struct.
    SwapchainPresentFenceInfoKHR( ArrayProxyNoTemporaries<const Fence> const & fences_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( fences_.size() ) ), pFences( fences_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SwapchainPresentFenceInfoKHR & operator=( SwapchainPresentFenceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; same layout-compatibility requirement as above.
    SwapchainPresentFenceInfoKHR & operator=( VkSwapchainPresentFenceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SwapchainPresentFenceInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentFenceInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentFenceInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = swapchainCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentFenceInfoKHR & setPFences( const Fence * pFences_ ) VULKAN_HPP_NOEXCEPT
    {
      pFences = pFences_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both swapchainCount and pFences from one array (overwrites any previous count).
    SwapchainPresentFenceInfoKHR & setFences( ArrayProxyNoTemporaries<const Fence> const & fences_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = static_cast<uint32_t>( fences_.size() );
      pFences        = fences_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type, so the wrapper can be handed straight to the C API.
    operator VkSwapchainPresentFenceInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSwapchainPresentFenceInfoKHR *>( this );
    }

    operator VkSwapchainPresentFenceInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSwapchainPresentFenceInfoKHR *>( this );
    }

    operator VkSwapchainPresentFenceInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSwapchainPresentFenceInfoKHR *>( this );
    }

    operator VkSwapchainPresentFenceInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSwapchainPresentFenceInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const Fence * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, swapchainCount, pFences );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SwapchainPresentFenceInfoKHR const & ) const = default;
#else
    // Memberwise equality; pFences is compared by pointer value, not by the fences it points to.
    bool operator==( SwapchainPresentFenceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pFences == rhs.pFences );
#  endif
    }

    bool operator!=( SwapchainPresentFenceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkSwapchainPresentFenceInfoKHR exactly; the reinterpret_cast
    // conversions above depend on this layout.
    StructureType sType          = StructureType::eSwapchainPresentFenceInfoKHR;
    const void *  pNext          = {};
    uint32_t      swapchainCount = {};
    const Fence * pFences        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C type to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkSwapchainPresentFenceInfoKHR>
  {
    using Type = SwapchainPresentFenceInfoKHR;
  };
#endif

  // Trait mapping the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eSwapchainPresentFenceInfoKHR>
  {
    using Type = SwapchainPresentFenceInfoKHR;
  };

  // Alias kept for source compatibility with the former EXT extension name.
  using SwapchainPresentFenceInfoEXT = SwapchainPresentFenceInfoKHR;

  // wrapper struct for struct VkSwapchainPresentModeInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainPresentModeInfoKHR.html
  struct SwapchainPresentModeInfoKHR
  {
    using NativeType = VkSwapchainPresentModeInfoKHR;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The sType value the Vulkan API expects for this structure.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSwapchainPresentModeInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Memberwise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR SwapchainPresentModeInfoKHR( uint32_t               swapchainCount_ = {},
                                                      const PresentModeKHR * pPresentModes_  = {},
                                                      const void *           pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , swapchainCount{ swapchainCount_ }
      , pPresentModes{ pPresentModes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SwapchainPresentModeInfoKHR( SwapchainPresentModeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on identical layout of wrapper and C type.
    SwapchainPresentModeInfoKHR( VkSwapchainPresentModeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SwapchainPresentModeInfoKHR( *reinterpret_cast<SwapchainPresentModeInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode convenience: derives swapchainCount and pPresentModes from one array.
    // The caller must keep the array alive for the lifetime of this struct.
    SwapchainPresentModeInfoKHR( ArrayProxyNoTemporaries<const PresentModeKHR> const & presentModes_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( presentModes_.size() ) ), pPresentModes( presentModes_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SwapchainPresentModeInfoKHR & operator=( SwapchainPresentModeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; same layout-compatibility requirement as above.
    SwapchainPresentModeInfoKHR & operator=( VkSwapchainPresentModeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SwapchainPresentModeInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModeInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = swapchainCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModeInfoKHR & setPPresentModes( const PresentModeKHR * pPresentModes_ ) VULKAN_HPP_NOEXCEPT
    {
      pPresentModes = pPresentModes_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both swapchainCount and pPresentModes from one array (overwrites any previous count).
    SwapchainPresentModeInfoKHR & setPresentModes( ArrayProxyNoTemporaries<const PresentModeKHR> const & presentModes_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = static_cast<uint32_t>( presentModes_.size() );
      pPresentModes  = presentModes_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type, so the wrapper can be handed straight to the C API.
    operator VkSwapchainPresentModeInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSwapchainPresentModeInfoKHR *>( this );
    }

    operator VkSwapchainPresentModeInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSwapchainPresentModeInfoKHR *>( this );
    }

    operator VkSwapchainPresentModeInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSwapchainPresentModeInfoKHR *>( this );
    }

    operator VkSwapchainPresentModeInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSwapchainPresentModeInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const PresentModeKHR * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, swapchainCount, pPresentModes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SwapchainPresentModeInfoKHR const & ) const = default;
#else
    // Memberwise equality; pPresentModes is compared by pointer value, not by pointee contents.
    bool operator==( SwapchainPresentModeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pPresentModes == rhs.pPresentModes );
#  endif
    }

    bool operator!=( SwapchainPresentModeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkSwapchainPresentModeInfoKHR exactly; the reinterpret_cast
    // conversions above depend on this layout.
    StructureType          sType          = StructureType::eSwapchainPresentModeInfoKHR;
    const void *           pNext          = {};
    uint32_t               swapchainCount = {};
    const PresentModeKHR * pPresentModes  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C type to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkSwapchainPresentModeInfoKHR>
  {
    using Type = SwapchainPresentModeInfoKHR;
  };
#endif

  // Trait mapping the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eSwapchainPresentModeInfoKHR>
  {
    using Type = SwapchainPresentModeInfoKHR;
  };

  // Alias kept for source compatibility with the former EXT extension name.
  using SwapchainPresentModeInfoEXT = SwapchainPresentModeInfoKHR;

  // wrapper struct for struct VkSwapchainPresentModesCreateInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainPresentModesCreateInfoKHR.html
  struct SwapchainPresentModesCreateInfoKHR
  {
    using NativeType = VkSwapchainPresentModesCreateInfoKHR;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The sType value the Vulkan API expects for this structure.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSwapchainPresentModesCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Memberwise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR SwapchainPresentModesCreateInfoKHR( uint32_t               presentModeCount_ = {},
                                                             const PresentModeKHR * pPresentModes_    = {},
                                                             const void *           pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , presentModeCount{ presentModeCount_ }
      , pPresentModes{ pPresentModes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SwapchainPresentModesCreateInfoKHR( SwapchainPresentModesCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on identical layout of wrapper and C type.
    SwapchainPresentModesCreateInfoKHR( VkSwapchainPresentModesCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SwapchainPresentModesCreateInfoKHR( *reinterpret_cast<SwapchainPresentModesCreateInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode convenience: derives presentModeCount and pPresentModes from one array.
    // The caller must keep the array alive for the lifetime of this struct.
    SwapchainPresentModesCreateInfoKHR( ArrayProxyNoTemporaries<const PresentModeKHR> const & presentModes_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), presentModeCount( static_cast<uint32_t>( presentModes_.size() ) ), pPresentModes( presentModes_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SwapchainPresentModesCreateInfoKHR & operator=( SwapchainPresentModesCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; same layout-compatibility requirement as above.
    SwapchainPresentModesCreateInfoKHR & operator=( VkSwapchainPresentModesCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SwapchainPresentModesCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModesCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModesCreateInfoKHR & setPresentModeCount( uint32_t presentModeCount_ ) VULKAN_HPP_NOEXCEPT
    {
      presentModeCount = presentModeCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModesCreateInfoKHR & setPPresentModes( const PresentModeKHR * pPresentModes_ ) VULKAN_HPP_NOEXCEPT
    {
      pPresentModes = pPresentModes_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both presentModeCount and pPresentModes from one array (overwrites any previous count).
    SwapchainPresentModesCreateInfoKHR & setPresentModes( ArrayProxyNoTemporaries<const PresentModeKHR> const & presentModes_ ) VULKAN_HPP_NOEXCEPT
    {
      presentModeCount = static_cast<uint32_t>( presentModes_.size() );
      pPresentModes    = presentModes_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type, so the wrapper can be handed straight to the C API.
    operator VkSwapchainPresentModesCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSwapchainPresentModesCreateInfoKHR *>( this );
    }

    operator VkSwapchainPresentModesCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSwapchainPresentModesCreateInfoKHR *>( this );
    }

    operator VkSwapchainPresentModesCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSwapchainPresentModesCreateInfoKHR *>( this );
    }

    operator VkSwapchainPresentModesCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSwapchainPresentModesCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const PresentModeKHR * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, presentModeCount, pPresentModes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SwapchainPresentModesCreateInfoKHR const & ) const = default;
#else
    // Memberwise equality; pPresentModes is compared by pointer value, not by pointee contents.
    bool operator==( SwapchainPresentModesCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentModeCount == rhs.presentModeCount ) && ( pPresentModes == rhs.pPresentModes );
#  endif
    }

    bool operator!=( SwapchainPresentModesCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkSwapchainPresentModesCreateInfoKHR exactly; the reinterpret_cast
    // conversions above depend on this layout.
    StructureType          sType            = StructureType::eSwapchainPresentModesCreateInfoKHR;
    const void *           pNext            = {};
    uint32_t               presentModeCount = {};
    const PresentModeKHR * pPresentModes    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C type to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkSwapchainPresentModesCreateInfoKHR>
  {
    using Type = SwapchainPresentModesCreateInfoKHR;
  };
#endif

  // Trait mapping the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eSwapchainPresentModesCreateInfoKHR>
  {
    using Type = SwapchainPresentModesCreateInfoKHR;
  };

  // Alias kept for source compatibility with the former EXT extension name.
  using SwapchainPresentModesCreateInfoEXT = SwapchainPresentModesCreateInfoKHR;

  // wrapper struct for struct VkSwapchainPresentScalingCreateInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainPresentScalingCreateInfoKHR.html
  struct SwapchainPresentScalingCreateInfoKHR
  {
    using NativeType = VkSwapchainPresentScalingCreateInfoKHR;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The sType value the Vulkan API expects for this structure.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSwapchainPresentScalingCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Memberwise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR SwapchainPresentScalingCreateInfoKHR( PresentScalingFlagsKHR scalingBehavior_ = {},
                                                               PresentGravityFlagsKHR presentGravityX_ = {},
                                                               PresentGravityFlagsKHR presentGravityY_ = {},
                                                               const void *           pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , scalingBehavior{ scalingBehavior_ }
      , presentGravityX{ presentGravityX_ }
      , presentGravityY{ presentGravityY_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SwapchainPresentScalingCreateInfoKHR( SwapchainPresentScalingCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on identical layout of wrapper and C type.
    SwapchainPresentScalingCreateInfoKHR( VkSwapchainPresentScalingCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SwapchainPresentScalingCreateInfoKHR( *reinterpret_cast<SwapchainPresentScalingCreateInfoKHR const *>( &rhs ) )
    {
    }

    SwapchainPresentScalingCreateInfoKHR & operator=( SwapchainPresentScalingCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; same layout-compatibility requirement as above.
    SwapchainPresentScalingCreateInfoKHR & operator=( VkSwapchainPresentScalingCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SwapchainPresentScalingCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentScalingCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentScalingCreateInfoKHR & setScalingBehavior( PresentScalingFlagsKHR scalingBehavior_ ) VULKAN_HPP_NOEXCEPT
    {
      scalingBehavior = scalingBehavior_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentScalingCreateInfoKHR & setPresentGravityX( PresentGravityFlagsKHR presentGravityX_ ) VULKAN_HPP_NOEXCEPT
    {
      presentGravityX = presentGravityX_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainPresentScalingCreateInfoKHR & setPresentGravityY( PresentGravityFlagsKHR presentGravityY_ ) VULKAN_HPP_NOEXCEPT
    {
      presentGravityY = presentGravityY_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type, so the wrapper can be handed straight to the C API.
    operator VkSwapchainPresentScalingCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSwapchainPresentScalingCreateInfoKHR *>( this );
    }

    operator VkSwapchainPresentScalingCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSwapchainPresentScalingCreateInfoKHR *>( this );
    }

    operator VkSwapchainPresentScalingCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSwapchainPresentScalingCreateInfoKHR *>( this );
    }

    operator VkSwapchainPresentScalingCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSwapchainPresentScalingCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, PresentScalingFlagsKHR const &, PresentGravityFlagsKHR const &, PresentGravityFlagsKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, scalingBehavior, presentGravityX, presentGravityY );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SwapchainPresentScalingCreateInfoKHR const & ) const = default;
#else
    // Memberwise equality; pNext is compared by pointer value, not by chained contents.
    bool operator==( SwapchainPresentScalingCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( scalingBehavior == rhs.scalingBehavior ) && ( presentGravityX == rhs.presentGravityX ) &&
             ( presentGravityY == rhs.presentGravityY );
#  endif
    }

    bool operator!=( SwapchainPresentScalingCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkSwapchainPresentScalingCreateInfoKHR exactly; the reinterpret_cast
    // conversions above depend on this layout.
    StructureType          sType           = StructureType::eSwapchainPresentScalingCreateInfoKHR;
    const void *           pNext           = {};
    PresentScalingFlagsKHR scalingBehavior = {};
    PresentGravityFlagsKHR presentGravityX = {};
    PresentGravityFlagsKHR presentGravityY = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C type to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkSwapchainPresentScalingCreateInfoKHR>
  {
    using Type = SwapchainPresentScalingCreateInfoKHR;
  };
#endif

  // Trait mapping the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eSwapchainPresentScalingCreateInfoKHR>
  {
    using Type = SwapchainPresentScalingCreateInfoKHR;
  };

  // Alias kept for source compatibility with the former EXT extension name.
  using SwapchainPresentScalingCreateInfoEXT = SwapchainPresentScalingCreateInfoKHR;

  // wrapper struct for struct VkSwapchainTimeDomainPropertiesEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainTimeDomainPropertiesEXT.html
  struct SwapchainTimeDomainPropertiesEXT
  {
    using NativeType = VkSwapchainTimeDomainPropertiesEXT;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The sType value the Vulkan API expects for this structure.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSwapchainTimeDomainPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Memberwise constructor; pTimeDomains and pTimeDomainIds are parallel arrays that
    // share timeDomainCount. sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR SwapchainTimeDomainPropertiesEXT( uint32_t        timeDomainCount_ = {},
                                                           TimeDomainKHR * pTimeDomains_    = {},
                                                           uint64_t *      pTimeDomainIds_  = {},
                                                           void *          pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , timeDomainCount{ timeDomainCount_ }
      , pTimeDomains{ pTimeDomains_ }
      , pTimeDomainIds{ pTimeDomainIds_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SwapchainTimeDomainPropertiesEXT( SwapchainTimeDomainPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on identical layout of wrapper and C type.
    SwapchainTimeDomainPropertiesEXT( VkSwapchainTimeDomainPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SwapchainTimeDomainPropertiesEXT( *reinterpret_cast<SwapchainTimeDomainPropertiesEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode convenience: takes two parallel arrays; timeDomainCount comes from the
    // first one. When both arrays are non-empty their sizes must match — checked via assert
    // (exceptions disabled) or LogicError (exceptions enabled).
    SwapchainTimeDomainPropertiesEXT( ArrayProxyNoTemporaries<TimeDomainKHR> const & timeDomains_,
                                      ArrayProxyNoTemporaries<uint64_t> const &      timeDomainIds_ = {},
                                      void *                                         pNext_         = nullptr )
      : pNext( pNext_ )
      , timeDomainCount( static_cast<uint32_t>( timeDomains_.size() ) )
      , pTimeDomains( timeDomains_.data() )
      , pTimeDomainIds( timeDomainIds_.data() )
    {
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( timeDomains_.empty() || timeDomainIds_.empty() || ( timeDomains_.size() == timeDomainIds_.size() ) );
#    else
      if ( !timeDomains_.empty() && !timeDomainIds_.empty() && ( timeDomains_.size() != timeDomainIds_.size() ) )
      {
        throw LogicError(
          VULKAN_HPP_NAMESPACE_STRING
          "::SwapchainTimeDomainPropertiesEXT::SwapchainTimeDomainPropertiesEXT: !timeDomains_.empty() && !timeDomainIds_.empty() && ( timeDomains_.size() != timeDomainIds_.size() )" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SwapchainTimeDomainPropertiesEXT & operator=( SwapchainTimeDomainPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; same layout-compatibility requirement as above.
    SwapchainTimeDomainPropertiesEXT & operator=( VkSwapchainTimeDomainPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SwapchainTimeDomainPropertiesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 SwapchainTimeDomainPropertiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainTimeDomainPropertiesEXT & setTimeDomainCount( uint32_t timeDomainCount_ ) VULKAN_HPP_NOEXCEPT
    {
      timeDomainCount = timeDomainCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainTimeDomainPropertiesEXT & setPTimeDomains( TimeDomainKHR * pTimeDomains_ ) VULKAN_HPP_NOEXCEPT
    {
      pTimeDomains = pTimeDomains_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both timeDomainCount and pTimeDomains from one array.
    SwapchainTimeDomainPropertiesEXT & setTimeDomains( ArrayProxyNoTemporaries<TimeDomainKHR> const & timeDomains_ ) VULKAN_HPP_NOEXCEPT
    {
      timeDomainCount = static_cast<uint32_t>( timeDomains_.size() );
      pTimeDomains    = timeDomains_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 SwapchainTimeDomainPropertiesEXT & setPTimeDomainIds( uint64_t * pTimeDomainIds_ ) VULKAN_HPP_NOEXCEPT
    {
      pTimeDomainIds = pTimeDomainIds_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // NOTE: also overwrites the shared timeDomainCount (the two arrays are parallel), so the
    // id array's size must match any previously set pTimeDomains array.
    SwapchainTimeDomainPropertiesEXT & setTimeDomainIds( ArrayProxyNoTemporaries<uint64_t> const & timeDomainIds_ ) VULKAN_HPP_NOEXCEPT
    {
      timeDomainCount = static_cast<uint32_t>( timeDomainIds_.size() );
      pTimeDomainIds  = timeDomainIds_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type, so the wrapper can be handed straight to the C API.
    operator VkSwapchainTimeDomainPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSwapchainTimeDomainPropertiesEXT *>( this );
    }

    operator VkSwapchainTimeDomainPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSwapchainTimeDomainPropertiesEXT *>( this );
    }

    operator VkSwapchainTimeDomainPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSwapchainTimeDomainPropertiesEXT *>( this );
    }

    operator VkSwapchainTimeDomainPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSwapchainTimeDomainPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, void * const &, uint32_t const &, TimeDomainKHR * const &, uint64_t * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, timeDomainCount, pTimeDomains, pTimeDomainIds );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SwapchainTimeDomainPropertiesEXT const & ) const = default;
#else
    // Memberwise equality; the array pointers are compared by address, not by contents.
    bool operator==( SwapchainTimeDomainPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timeDomainCount == rhs.timeDomainCount ) && ( pTimeDomains == rhs.pTimeDomains ) &&
             ( pTimeDomainIds == rhs.pTimeDomainIds );
#  endif
    }

    bool operator!=( SwapchainTimeDomainPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkSwapchainTimeDomainPropertiesEXT exactly; the reinterpret_cast
    // conversions above depend on this layout.
    StructureType   sType           = StructureType::eSwapchainTimeDomainPropertiesEXT;
    void *          pNext           = {};
    uint32_t        timeDomainCount = {};
    TimeDomainKHR * pTimeDomains    = {};
    uint64_t *      pTimeDomainIds  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C type to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkSwapchainTimeDomainPropertiesEXT>
  {
    using Type = SwapchainTimeDomainPropertiesEXT;
  };
#endif

  // Trait mapping the StructureType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eSwapchainTimeDomainPropertiesEXT>
  {
    using Type = SwapchainTimeDomainPropertiesEXT;
  };

  // wrapper struct for struct VkSwapchainTimingPropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainTimingPropertiesEXT.html
  struct SwapchainTimingPropertiesEXT
  {
    using NativeType = VkSwapchainTimingPropertiesEXT;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The sType value the Vulkan API expects for this structure.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eSwapchainTimingPropertiesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Memberwise constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR
      SwapchainTimingPropertiesEXT( uint64_t refreshDuration_ = {}, uint64_t refreshInterval_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , refreshDuration{ refreshDuration_ }
      , refreshInterval{ refreshInterval_ }
    {
    }

    VULKAN_HPP_CONSTEXPR SwapchainTimingPropertiesEXT( SwapchainTimingPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on identical layout of wrapper and C type.
    SwapchainTimingPropertiesEXT( VkSwapchainTimingPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SwapchainTimingPropertiesEXT( *reinterpret_cast<SwapchainTimingPropertiesEXT const *>( &rhs ) )
    {
    }

    SwapchainTimingPropertiesEXT & operator=( SwapchainTimingPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct; same layout-compatibility requirement as above.
    SwapchainTimingPropertiesEXT & operator=( VkSwapchainTimingPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<SwapchainTimingPropertiesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 SwapchainTimingPropertiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainTimingPropertiesEXT & setRefreshDuration( uint64_t refreshDuration_ ) VULKAN_HPP_NOEXCEPT
    {
      refreshDuration = refreshDuration_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SwapchainTimingPropertiesEXT & setRefreshInterval( uint64_t refreshInterval_ ) VULKAN_HPP_NOEXCEPT
    {
      refreshInterval = refreshInterval_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C type, so the wrapper can be handed straight to the C API.
    operator VkSwapchainTimingPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSwapchainTimingPropertiesEXT *>( this );
    }

    operator VkSwapchainTimingPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSwapchainTimingPropertiesEXT *>( this );
    }

    operator VkSwapchainTimingPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkSwapchainTimingPropertiesEXT *>( this );
    }

    operator VkSwapchainTimingPropertiesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkSwapchainTimingPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, void * const &, uint64_t const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, refreshDuration, refreshInterval );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SwapchainTimingPropertiesEXT const & ) const = default;
#else
    // Memberwise equality; pNext is compared by pointer value, not by chained contents.
    bool operator==( SwapchainTimingPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( refreshDuration == rhs.refreshDuration ) && ( refreshInterval == rhs.refreshInterval );
#  endif
    }

    bool operator!=( SwapchainTimingPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkSwapchainTimingPropertiesEXT exactly; the reinterpret_cast
    // conversions above depend on this layout.
    StructureType sType           = StructureType::eSwapchainTimingPropertiesEXT;
    void *        pNext           = {};
    uint64_t      refreshDuration = {};
    uint64_t      refreshInterval = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkSwapchainTimingPropertiesEXT type to its C++ wrapper (only generated for C++20 and newer).
  template <>
  struct CppType<VkSwapchainTimingPropertiesEXT>
  {
    using Type = SwapchainTimingPropertiesEXT;
  };
#endif

  // Maps StructureType::eSwapchainTimingPropertiesEXT to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eSwapchainTimingPropertiesEXT>
  {
    using Type = SwapchainTimingPropertiesEXT;
  };

  // wrapper struct for struct VkTensorCaptureDescriptorDataInfoARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorCaptureDescriptorDataInfoARM.html
  struct TensorCaptureDescriptorDataInfoARM
  {
    using NativeType = VkTensorCaptureDescriptorDataInfoARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eTensorCaptureDescriptorDataInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: default arguments yield a zero-initialized struct with the correct sType already set.
    VULKAN_HPP_CONSTEXPR TensorCaptureDescriptorDataInfoARM( TensorARM tensor_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , tensor{ tensor_ }
    {
    }

    VULKAN_HPP_CONSTEXPR TensorCaptureDescriptorDataInfoARM( TensorCaptureDescriptorDataInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct; relies on this wrapper being layout-identical to VkTensorCaptureDescriptorDataInfoARM.
    TensorCaptureDescriptorDataInfoARM( VkTensorCaptureDescriptorDataInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : TensorCaptureDescriptorDataInfoARM( *reinterpret_cast<TensorCaptureDescriptorDataInfoARM const *>( &rhs ) )
    {
    }

    TensorCaptureDescriptorDataInfoARM & operator=( TensorCaptureDescriptorDataInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct, implemented via the same layout-based reinterpret_cast.
    TensorCaptureDescriptorDataInfoARM & operator=( VkTensorCaptureDescriptorDataInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<TensorCaptureDescriptorDataInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 TensorCaptureDescriptorDataInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorCaptureDescriptorDataInfoARM & setTensor( TensorARM tensor_ ) VULKAN_HPP_NOEXCEPT
    {
      tensor = tensor_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (by reference and by pointer), for passing directly to the C API.
    operator VkTensorCaptureDescriptorDataInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkTensorCaptureDescriptorDataInfoARM *>( this );
    }

    operator VkTensorCaptureDescriptorDataInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkTensorCaptureDescriptorDataInfoARM *>( this );
    }

    operator VkTensorCaptureDescriptorDataInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkTensorCaptureDescriptorDataInfoARM *>( this );
    }

    operator VkTensorCaptureDescriptorDataInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkTensorCaptureDescriptorDataInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, TensorARM const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, tensor );
    }
#endif

    // Defaulted three-way comparison where available; otherwise member-wise equality.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( TensorCaptureDescriptorDataInfoARM const & ) const = default;
#else
    bool operator==( TensorCaptureDescriptorDataInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tensor == rhs.tensor );
#  endif
    }

    bool operator!=( TensorCaptureDescriptorDataInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the native VkTensorCaptureDescriptorDataInfoARM struct; do not reorder (reinterpret_cast interop depends on it).
    StructureType sType  = StructureType::eTensorCaptureDescriptorDataInfoARM;
    const void *  pNext  = {};
    TensorARM     tensor = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkTensorCaptureDescriptorDataInfoARM type to its C++ wrapper (only generated for C++20 and newer).
  template <>
  struct CppType<VkTensorCaptureDescriptorDataInfoARM>
  {
    using Type = TensorCaptureDescriptorDataInfoARM;
  };
#endif

  // Maps StructureType::eTensorCaptureDescriptorDataInfoARM to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eTensorCaptureDescriptorDataInfoARM>
  {
    using Type = TensorCaptureDescriptorDataInfoARM;
  };

  // wrapper struct for struct VkTensorMemoryBarrierARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorMemoryBarrierARM.html
  struct TensorMemoryBarrierARM
  {
    using NativeType = VkTensorMemoryBarrierARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eTensorMemoryBarrierARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: default arguments yield a zero-initialized barrier with the correct sType already set.
    VULKAN_HPP_CONSTEXPR TensorMemoryBarrierARM( PipelineStageFlags2 srcStageMask_        = {},
                                                 AccessFlags2        srcAccessMask_       = {},
                                                 PipelineStageFlags2 dstStageMask_        = {},
                                                 AccessFlags2        dstAccessMask_       = {},
                                                 uint32_t            srcQueueFamilyIndex_ = {},
                                                 uint32_t            dstQueueFamilyIndex_ = {},
                                                 TensorARM           tensor_              = {},
                                                 const void *        pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , srcStageMask{ srcStageMask_ }
      , srcAccessMask{ srcAccessMask_ }
      , dstStageMask{ dstStageMask_ }
      , dstAccessMask{ dstAccessMask_ }
      , srcQueueFamilyIndex{ srcQueueFamilyIndex_ }
      , dstQueueFamilyIndex{ dstQueueFamilyIndex_ }
      , tensor{ tensor_ }
    {
    }

    VULKAN_HPP_CONSTEXPR TensorMemoryBarrierARM( TensorMemoryBarrierARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct; relies on this wrapper being layout-identical to VkTensorMemoryBarrierARM.
    TensorMemoryBarrierARM( VkTensorMemoryBarrierARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : TensorMemoryBarrierARM( *reinterpret_cast<TensorMemoryBarrierARM const *>( &rhs ) )
    {
    }

    TensorMemoryBarrierARM & operator=( TensorMemoryBarrierARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct, implemented via the same layout-based reinterpret_cast.
    TensorMemoryBarrierARM & operator=( VkTensorMemoryBarrierARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<TensorMemoryBarrierARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 TensorMemoryBarrierARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorMemoryBarrierARM & setSrcStageMask( PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcStageMask = srcStageMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorMemoryBarrierARM & setSrcAccessMask( AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAccessMask = srcAccessMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorMemoryBarrierARM & setDstStageMask( PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstStageMask = dstStageMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorMemoryBarrierARM & setDstAccessMask( AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAccessMask = dstAccessMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorMemoryBarrierARM & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      srcQueueFamilyIndex = srcQueueFamilyIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorMemoryBarrierARM & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      dstQueueFamilyIndex = dstQueueFamilyIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorMemoryBarrierARM & setTensor( TensorARM tensor_ ) VULKAN_HPP_NOEXCEPT
    {
      tensor = tensor_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (by reference and by pointer), for passing directly to the C API.
    operator VkTensorMemoryBarrierARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkTensorMemoryBarrierARM *>( this );
    }

    operator VkTensorMemoryBarrierARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkTensorMemoryBarrierARM *>( this );
    }

    operator VkTensorMemoryBarrierARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkTensorMemoryBarrierARM *>( this );
    }

    operator VkTensorMemoryBarrierARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkTensorMemoryBarrierARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<StructureType const &,
               const void * const &,
               PipelineStageFlags2 const &,
               AccessFlags2 const &,
               PipelineStageFlags2 const &,
               AccessFlags2 const &,
               uint32_t const &,
               uint32_t const &,
               TensorARM const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask, srcQueueFamilyIndex, dstQueueFamilyIndex, tensor );
    }
#endif

    // Defaulted three-way comparison where available; otherwise member-wise equality.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( TensorMemoryBarrierARM const & ) const = default;
#else
    bool operator==( TensorMemoryBarrierARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) &&
             ( dstStageMask == rhs.dstStageMask ) && ( dstAccessMask == rhs.dstAccessMask ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) &&
             ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( tensor == rhs.tensor );
#  endif
    }

    bool operator!=( TensorMemoryBarrierARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the native VkTensorMemoryBarrierARM struct; do not reorder (reinterpret_cast interop depends on it).
    StructureType       sType               = StructureType::eTensorMemoryBarrierARM;
    const void *        pNext               = {};
    PipelineStageFlags2 srcStageMask        = {};
    AccessFlags2        srcAccessMask       = {};
    PipelineStageFlags2 dstStageMask        = {};
    AccessFlags2        dstAccessMask       = {};
    uint32_t            srcQueueFamilyIndex = {};
    uint32_t            dstQueueFamilyIndex = {};
    TensorARM           tensor              = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkTensorMemoryBarrierARM type to its C++ wrapper (only generated for C++20 and newer).
  template <>
  struct CppType<VkTensorMemoryBarrierARM>
  {
    using Type = TensorMemoryBarrierARM;
  };
#endif

  // Maps StructureType::eTensorMemoryBarrierARM to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eTensorMemoryBarrierARM>
  {
    using Type = TensorMemoryBarrierARM;
  };

  // wrapper struct for struct VkTensorDependencyInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorDependencyInfoARM.html
  struct TensorDependencyInfoARM
  {
    using NativeType = VkTensorDependencyInfoARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eTensorDependencyInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: pTensorMemoryBarriers_ points to an array of tensorMemoryBarrierCount_ barriers (not owned by this struct).
    VULKAN_HPP_CONSTEXPR TensorDependencyInfoARM( uint32_t                       tensorMemoryBarrierCount_ = {},
                                                  const TensorMemoryBarrierARM * pTensorMemoryBarriers_    = {},
                                                  const void *                   pNext_                    = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , tensorMemoryBarrierCount{ tensorMemoryBarrierCount_ }
      , pTensorMemoryBarriers{ pTensorMemoryBarriers_ }
    {
    }

    VULKAN_HPP_CONSTEXPR TensorDependencyInfoARM( TensorDependencyInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct; relies on this wrapper being layout-identical to VkTensorDependencyInfoARM.
    TensorDependencyInfoARM( VkTensorDependencyInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : TensorDependencyInfoARM( *reinterpret_cast<TensorDependencyInfoARM const *>( &rhs ) )
    {
    }

    TensorDependencyInfoARM & operator=( TensorDependencyInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct, implemented via the same layout-based reinterpret_cast.
    TensorDependencyInfoARM & operator=( VkTensorDependencyInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<TensorDependencyInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 TensorDependencyInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorDependencyInfoARM & setTensorMemoryBarrierCount( uint32_t tensorMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
    {
      tensorMemoryBarrierCount = tensorMemoryBarrierCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorDependencyInfoARM & setPTensorMemoryBarriers( const TensorMemoryBarrierARM * pTensorMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
    {
      pTensorMemoryBarriers = pTensorMemoryBarriers_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (by reference and by pointer), for passing directly to the C API.
    operator VkTensorDependencyInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkTensorDependencyInfoARM *>( this );
    }

    operator VkTensorDependencyInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkTensorDependencyInfoARM *>( this );
    }

    operator VkTensorDependencyInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkTensorDependencyInfoARM *>( this );
    }

    operator VkTensorDependencyInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkTensorDependencyInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const TensorMemoryBarrierARM * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, tensorMemoryBarrierCount, pTensorMemoryBarriers );
    }
#endif

    // Defaulted three-way comparison where available; otherwise member-wise equality.
    // Note: equality compares the pTensorMemoryBarriers pointer itself, not the pointed-to array.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( TensorDependencyInfoARM const & ) const = default;
#else
    bool operator==( TensorDependencyInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tensorMemoryBarrierCount == rhs.tensorMemoryBarrierCount ) &&
             ( pTensorMemoryBarriers == rhs.pTensorMemoryBarriers );
#  endif
    }

    bool operator!=( TensorDependencyInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the native VkTensorDependencyInfoARM struct; do not reorder (reinterpret_cast interop depends on it).
    StructureType                  sType                    = StructureType::eTensorDependencyInfoARM;
    const void *                   pNext                    = {};
    uint32_t                       tensorMemoryBarrierCount = {};
    const TensorMemoryBarrierARM * pTensorMemoryBarriers    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkTensorDependencyInfoARM type to its C++ wrapper (only generated for C++20 and newer).
  template <>
  struct CppType<VkTensorDependencyInfoARM>
  {
    using Type = TensorDependencyInfoARM;
  };
#endif

  // Maps StructureType::eTensorDependencyInfoARM to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eTensorDependencyInfoARM>
  {
    using Type = TensorDependencyInfoARM;
  };

  // wrapper struct for struct VkTensorFormatPropertiesARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorFormatPropertiesARM.html
  struct TensorFormatPropertiesARM
  {
    using NativeType = VkTensorFormatPropertiesARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eTensorFormatPropertiesARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: default arguments yield a zero-initialized struct with the correct sType already set.
    VULKAN_HPP_CONSTEXPR TensorFormatPropertiesARM( FormatFeatureFlags2 optimalTilingTensorFeatures_ = {},
                                                    FormatFeatureFlags2 linearTilingTensorFeatures_  = {},
                                                    void *              pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , optimalTilingTensorFeatures{ optimalTilingTensorFeatures_ }
      , linearTilingTensorFeatures{ linearTilingTensorFeatures_ }
    {
    }

    VULKAN_HPP_CONSTEXPR TensorFormatPropertiesARM( TensorFormatPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct; relies on this wrapper being layout-identical to VkTensorFormatPropertiesARM.
    TensorFormatPropertiesARM( VkTensorFormatPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : TensorFormatPropertiesARM( *reinterpret_cast<TensorFormatPropertiesARM const *>( &rhs ) )
    {
    }

    TensorFormatPropertiesARM & operator=( TensorFormatPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct, implemented via the same layout-based reinterpret_cast.
    TensorFormatPropertiesARM & operator=( VkTensorFormatPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<TensorFormatPropertiesARM const *>( &rhs );
      return *this;
    }

    // Note: unlike the other structs here, no setters are generated — presumably this is a query-result
    // ("returnedonly") structure filled in by the implementation; confirm against the Vulkan registry.

    // Implicit conversions to the native type (by reference and by pointer), for passing directly to the C API.
    operator VkTensorFormatPropertiesARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkTensorFormatPropertiesARM *>( this );
    }

    operator VkTensorFormatPropertiesARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkTensorFormatPropertiesARM *>( this );
    }

    operator VkTensorFormatPropertiesARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkTensorFormatPropertiesARM *>( this );
    }

    operator VkTensorFormatPropertiesARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkTensorFormatPropertiesARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<StructureType const &, void * const &, FormatFeatureFlags2 const &, FormatFeatureFlags2 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, optimalTilingTensorFeatures, linearTilingTensorFeatures );
    }
#endif

    // Defaulted three-way comparison where available; otherwise member-wise equality.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( TensorFormatPropertiesARM const & ) const = default;
#else
    bool operator==( TensorFormatPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( optimalTilingTensorFeatures == rhs.optimalTilingTensorFeatures ) &&
             ( linearTilingTensorFeatures == rhs.linearTilingTensorFeatures );
#  endif
    }

    bool operator!=( TensorFormatPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the native VkTensorFormatPropertiesARM struct; do not reorder (reinterpret_cast interop depends on it).
    StructureType       sType                       = StructureType::eTensorFormatPropertiesARM;
    void *              pNext                       = {};
    FormatFeatureFlags2 optimalTilingTensorFeatures = {};
    FormatFeatureFlags2 linearTilingTensorFeatures  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkTensorFormatPropertiesARM type to its C++ wrapper (only generated for C++20 and newer).
  template <>
  struct CppType<VkTensorFormatPropertiesARM>
  {
    using Type = TensorFormatPropertiesARM;
  };
#endif

  // Maps StructureType::eTensorFormatPropertiesARM to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eTensorFormatPropertiesARM>
  {
    using Type = TensorFormatPropertiesARM;
  };

  // wrapper struct for struct VkTensorMemoryRequirementsInfoARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorMemoryRequirementsInfoARM.html
  struct TensorMemoryRequirementsInfoARM
  {
    using NativeType = VkTensorMemoryRequirementsInfoARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eTensorMemoryRequirementsInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: default arguments yield a zero-initialized struct with the correct sType already set.
    VULKAN_HPP_CONSTEXPR TensorMemoryRequirementsInfoARM( TensorARM tensor_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , tensor{ tensor_ }
    {
    }

    VULKAN_HPP_CONSTEXPR TensorMemoryRequirementsInfoARM( TensorMemoryRequirementsInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct; relies on this wrapper being layout-identical to VkTensorMemoryRequirementsInfoARM.
    TensorMemoryRequirementsInfoARM( VkTensorMemoryRequirementsInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : TensorMemoryRequirementsInfoARM( *reinterpret_cast<TensorMemoryRequirementsInfoARM const *>( &rhs ) )
    {
    }

    TensorMemoryRequirementsInfoARM & operator=( TensorMemoryRequirementsInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct, implemented via the same layout-based reinterpret_cast.
    TensorMemoryRequirementsInfoARM & operator=( VkTensorMemoryRequirementsInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<TensorMemoryRequirementsInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 TensorMemoryRequirementsInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorMemoryRequirementsInfoARM & setTensor( TensorARM tensor_ ) VULKAN_HPP_NOEXCEPT
    {
      tensor = tensor_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (by reference and by pointer), for passing directly to the C API.
    operator VkTensorMemoryRequirementsInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkTensorMemoryRequirementsInfoARM *>( this );
    }

    operator VkTensorMemoryRequirementsInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkTensorMemoryRequirementsInfoARM *>( this );
    }

    operator VkTensorMemoryRequirementsInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkTensorMemoryRequirementsInfoARM *>( this );
    }

    operator VkTensorMemoryRequirementsInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkTensorMemoryRequirementsInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, TensorARM const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, tensor );
    }
#endif

    // Defaulted three-way comparison where available; otherwise member-wise equality.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( TensorMemoryRequirementsInfoARM const & ) const = default;
#else
    bool operator==( TensorMemoryRequirementsInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tensor == rhs.tensor );
#  endif
    }

    bool operator!=( TensorMemoryRequirementsInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the native VkTensorMemoryRequirementsInfoARM struct; do not reorder (reinterpret_cast interop depends on it).
    StructureType sType  = StructureType::eTensorMemoryRequirementsInfoARM;
    const void *  pNext  = {};
    TensorARM     tensor = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkTensorMemoryRequirementsInfoARM type to its C++ wrapper (only generated for C++20 and newer).
  template <>
  struct CppType<VkTensorMemoryRequirementsInfoARM>
  {
    using Type = TensorMemoryRequirementsInfoARM;
  };
#endif

  // Maps StructureType::eTensorMemoryRequirementsInfoARM to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eTensorMemoryRequirementsInfoARM>
  {
    using Type = TensorMemoryRequirementsInfoARM;
  };

  // wrapper struct for struct VkTensorViewCaptureDescriptorDataInfoARM, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorViewCaptureDescriptorDataInfoARM.html
  struct TensorViewCaptureDescriptorDataInfoARM
  {
    using NativeType = VkTensorViewCaptureDescriptorDataInfoARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eTensorViewCaptureDescriptorDataInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: default arguments yield a zero-initialized struct with the correct sType already set.
    VULKAN_HPP_CONSTEXPR TensorViewCaptureDescriptorDataInfoARM( TensorViewARM tensorView_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , tensorView{ tensorView_ }
    {
    }

    VULKAN_HPP_CONSTEXPR TensorViewCaptureDescriptorDataInfoARM( TensorViewCaptureDescriptorDataInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct; relies on this wrapper being layout-identical to VkTensorViewCaptureDescriptorDataInfoARM.
    TensorViewCaptureDescriptorDataInfoARM( VkTensorViewCaptureDescriptorDataInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : TensorViewCaptureDescriptorDataInfoARM( *reinterpret_cast<TensorViewCaptureDescriptorDataInfoARM const *>( &rhs ) )
    {
    }

    TensorViewCaptureDescriptorDataInfoARM & operator=( TensorViewCaptureDescriptorDataInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct, implemented via the same layout-based reinterpret_cast.
    TensorViewCaptureDescriptorDataInfoARM & operator=( VkTensorViewCaptureDescriptorDataInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<TensorViewCaptureDescriptorDataInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 TensorViewCaptureDescriptorDataInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorViewCaptureDescriptorDataInfoARM & setTensorView( TensorViewARM tensorView_ ) VULKAN_HPP_NOEXCEPT
    {
      tensorView = tensorView_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (by reference and by pointer), for passing directly to the C API.
    operator VkTensorViewCaptureDescriptorDataInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkTensorViewCaptureDescriptorDataInfoARM *>( this );
    }

    operator VkTensorViewCaptureDescriptorDataInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkTensorViewCaptureDescriptorDataInfoARM *>( this );
    }

    operator VkTensorViewCaptureDescriptorDataInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkTensorViewCaptureDescriptorDataInfoARM *>( this );
    }

    operator VkTensorViewCaptureDescriptorDataInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkTensorViewCaptureDescriptorDataInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, TensorViewARM const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, tensorView );
    }
#endif

    // Defaulted three-way comparison where available; otherwise member-wise equality.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( TensorViewCaptureDescriptorDataInfoARM const & ) const = default;
#else
    bool operator==( TensorViewCaptureDescriptorDataInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tensorView == rhs.tensorView );
#  endif
    }

    bool operator!=( TensorViewCaptureDescriptorDataInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the native VkTensorViewCaptureDescriptorDataInfoARM struct; do not reorder (reinterpret_cast interop depends on it).
    StructureType sType      = StructureType::eTensorViewCaptureDescriptorDataInfoARM;
    const void *  pNext      = {};
    TensorViewARM tensorView = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkTensorViewCaptureDescriptorDataInfoARM type to its C++ wrapper (only generated for C++20 and newer).
  template <>
  struct CppType<VkTensorViewCaptureDescriptorDataInfoARM>
  {
    using Type = TensorViewCaptureDescriptorDataInfoARM;
  };
#endif

  // Maps StructureType::eTensorViewCaptureDescriptorDataInfoARM to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eTensorViewCaptureDescriptorDataInfoARM>
  {
    using Type = TensorViewCaptureDescriptorDataInfoARM;
  };

  // wrapper struct for struct VkTensorViewCreateInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorViewCreateInfoARM.html
  struct TensorViewCreateInfoARM
  {
    using NativeType = VkTensorViewCreateInfoARM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eTensorViewCreateInfoARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: default arguments yield a zero-initialized struct with the correct sType already set.
    VULKAN_HPP_CONSTEXPR TensorViewCreateInfoARM( TensorViewCreateFlagsARM flags_  = {},
                                                  TensorARM                tensor_ = {},
                                                  Format                   format_ = Format::eUndefined,
                                                  const void *             pNext_  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , tensor{ tensor_ }
      , format{ format_ }
    {
    }

    VULKAN_HPP_CONSTEXPR TensorViewCreateInfoARM( TensorViewCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construction from the native C struct; relies on this wrapper being layout-identical to VkTensorViewCreateInfoARM.
    TensorViewCreateInfoARM( VkTensorViewCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : TensorViewCreateInfoARM( *reinterpret_cast<TensorViewCreateInfoARM const *>( &rhs ) )
    {
    }

    TensorViewCreateInfoARM & operator=( TensorViewCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct, implemented via the same layout-based reinterpret_cast.
    TensorViewCreateInfoARM & operator=( VkTensorViewCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<TensorViewCreateInfoARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 TensorViewCreateInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorViewCreateInfoARM & setFlags( TensorViewCreateFlagsARM flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorViewCreateInfoARM & setTensor( TensorARM tensor_ ) VULKAN_HPP_NOEXCEPT
    {
      tensor = tensor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TensorViewCreateInfoARM & setFormat( Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (by reference and by pointer), for passing directly to the C API.
    operator VkTensorViewCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkTensorViewCreateInfoARM *>( this );
    }

    operator VkTensorViewCreateInfoARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkTensorViewCreateInfoARM *>( this );
    }

    operator VkTensorViewCreateInfoARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkTensorViewCreateInfoARM *>( this );
    }

    operator VkTensorViewCreateInfoARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkTensorViewCreateInfoARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, TensorViewCreateFlagsARM const &, TensorARM const &, Format const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, tensor, format );
    }
#endif

    // Defaulted three-way comparison where available; otherwise member-wise equality.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( TensorViewCreateInfoARM const & ) const = default;
#else
    bool operator==( TensorViewCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( tensor == rhs.tensor ) && ( format == rhs.format );
#  endif
    }

    bool operator!=( TensorViewCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the native VkTensorViewCreateInfoARM struct; do not reorder (reinterpret_cast interop depends on it).
    StructureType            sType  = StructureType::eTensorViewCreateInfoARM;
    const void *             pNext  = {};
    TensorViewCreateFlagsARM flags  = {};
    TensorARM                tensor = {};
    Format                   format = Format::eUndefined;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkTensorViewCreateInfoARM>
  {
    using Type = TensorViewCreateInfoARM;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eTensorViewCreateInfoARM>
  {
    using Type = TensorViewCreateInfoARM;
  };

  // wrapper struct for struct VkTextureLODGatherFormatPropertiesAMD, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkTextureLODGatherFormatPropertiesAMD.html
  struct TextureLODGatherFormatPropertiesAMD
  {
    using NativeType = VkTextureLODGatherFormatPropertiesAMD;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eTextureLodGatherFormatPropertiesAMD;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( Bool32 supportsTextureGatherLODBiasAMD_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , supportsTextureGatherLODBiasAMD{ supportsTextureGatherLODBiasAMD_ }
    {
    }

    VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because the wrapper is layout-identical to
    // VkTextureLODGatherFormatPropertiesAMD (generator-enforced).
    TextureLODGatherFormatPropertiesAMD( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : TextureLODGatherFormatPropertiesAMD( *reinterpret_cast<TextureLODGatherFormatPropertiesAMD const *>( &rhs ) )
    {
    }

    TextureLODGatherFormatPropertiesAMD & operator=( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    TextureLODGatherFormatPropertiesAMD & operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<TextureLODGatherFormatPropertiesAMD const *>( &rhs );
      return *this;
    }

    // NOTE: output-only structure (filled in by the implementation), hence the non-const
    // pNext and the absence of member setters.

    // Implicit conversions to the C struct for direct use with the C API.
    operator VkTextureLODGatherFormatPropertiesAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkTextureLODGatherFormatPropertiesAMD *>( this );
    }

    operator VkTextureLODGatherFormatPropertiesAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkTextureLODGatherFormatPropertiesAMD *>( this );
    }

    operator VkTextureLODGatherFormatPropertiesAMD const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkTextureLODGatherFormatPropertiesAMD *>( this );
    }

    operator VkTextureLODGatherFormatPropertiesAMD *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkTextureLODGatherFormatPropertiesAMD *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used by operator==.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, supportsTextureGatherLODBiasAMD );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( TextureLODGatherFormatPropertiesAMD const & ) const = default;
#else
    bool operator==( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD );
#  endif
    }

    bool operator!=( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct; do not reorder.
    StructureType sType                           = StructureType::eTextureLodGatherFormatPropertiesAMD;
    void *        pNext                           = {};
    Bool32        supportsTextureGatherLODBiasAMD = {};
  };

  // Trait specializations mapping the C type / StructureType enumerant to this wrapper.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkTextureLODGatherFormatPropertiesAMD>
  {
    using Type = TextureLODGatherFormatPropertiesAMD;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eTextureLodGatherFormatPropertiesAMD>
  {
    using Type = TextureLODGatherFormatPropertiesAMD;
  };

  // wrapper struct for struct VkTileMemoryBindInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTileMemoryBindInfoQCOM.html
  struct TileMemoryBindInfoQCOM
  {
    using NativeType = VkTileMemoryBindInfoQCOM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eTileMemoryBindInfoQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and never passed in.
    VULKAN_HPP_CONSTEXPR TileMemoryBindInfoQCOM( DeviceMemory memory_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , memory{ memory_ }
    {
    }

    VULKAN_HPP_CONSTEXPR TileMemoryBindInfoQCOM( TileMemoryBindInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because the wrapper is layout-identical to
    // VkTileMemoryBindInfoQCOM (generator-enforced).
    TileMemoryBindInfoQCOM( VkTileMemoryBindInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : TileMemoryBindInfoQCOM( *reinterpret_cast<TileMemoryBindInfoQCOM const *>( &rhs ) )
    {
    }

    TileMemoryBindInfoQCOM & operator=( TileMemoryBindInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    TileMemoryBindInfoQCOM & operator=( VkTileMemoryBindInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<TileMemoryBindInfoQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (fluent) setters; sType intentionally has no setter.
    VULKAN_HPP_CONSTEXPR_14 TileMemoryBindInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TileMemoryBindInfoQCOM & setMemory( DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct for direct use with the C API.
    operator VkTileMemoryBindInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkTileMemoryBindInfoQCOM *>( this );
    }

    operator VkTileMemoryBindInfoQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkTileMemoryBindInfoQCOM *>( this );
    }

    operator VkTileMemoryBindInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkTileMemoryBindInfoQCOM *>( this );
    }

    operator VkTileMemoryBindInfoQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkTileMemoryBindInfoQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used by operator==.
    std::tuple<StructureType const &, const void * const &, DeviceMemory const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memory );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( TileMemoryBindInfoQCOM const & ) const = default;
#else
    bool operator==( TileMemoryBindInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory );
#  endif
    }

    bool operator!=( TileMemoryBindInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct; do not reorder.
    StructureType sType  = StructureType::eTileMemoryBindInfoQCOM;
    const void *  pNext  = {};
    DeviceMemory  memory = {};
  };

  // Trait specializations mapping the C type / StructureType enumerant to this wrapper.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkTileMemoryBindInfoQCOM>
  {
    using Type = TileMemoryBindInfoQCOM;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eTileMemoryBindInfoQCOM>
  {
    using Type = TileMemoryBindInfoQCOM;
  };

  // wrapper struct for struct VkTileMemoryRequirementsQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTileMemoryRequirementsQCOM.html
  struct TileMemoryRequirementsQCOM
  {
    using NativeType = VkTileMemoryRequirementsQCOM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eTileMemoryRequirementsQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and never passed in.
    VULKAN_HPP_CONSTEXPR TileMemoryRequirementsQCOM( DeviceSize size_ = {}, DeviceSize alignment_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , size{ size_ }
      , alignment{ alignment_ }
    {
    }

    VULKAN_HPP_CONSTEXPR TileMemoryRequirementsQCOM( TileMemoryRequirementsQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because the wrapper is layout-identical to
    // VkTileMemoryRequirementsQCOM (generator-enforced).
    TileMemoryRequirementsQCOM( VkTileMemoryRequirementsQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : TileMemoryRequirementsQCOM( *reinterpret_cast<TileMemoryRequirementsQCOM const *>( &rhs ) )
    {
    }

    TileMemoryRequirementsQCOM & operator=( TileMemoryRequirementsQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    TileMemoryRequirementsQCOM & operator=( VkTileMemoryRequirementsQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<TileMemoryRequirementsQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (fluent) setters; sType intentionally has no setter.
    VULKAN_HPP_CONSTEXPR_14 TileMemoryRequirementsQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TileMemoryRequirementsQCOM & setSize( DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TileMemoryRequirementsQCOM & setAlignment( DeviceSize alignment_ ) VULKAN_HPP_NOEXCEPT
    {
      alignment = alignment_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct for direct use with the C API.
    operator VkTileMemoryRequirementsQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkTileMemoryRequirementsQCOM *>( this );
    }

    operator VkTileMemoryRequirementsQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkTileMemoryRequirementsQCOM *>( this );
    }

    operator VkTileMemoryRequirementsQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkTileMemoryRequirementsQCOM *>( this );
    }

    operator VkTileMemoryRequirementsQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkTileMemoryRequirementsQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used by operator==.
    std::tuple<StructureType const &, void * const &, DeviceSize const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, size, alignment );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( TileMemoryRequirementsQCOM const & ) const = default;
#else
    bool operator==( TileMemoryRequirementsQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( size == rhs.size ) && ( alignment == rhs.alignment );
#  endif
    }

    bool operator!=( TileMemoryRequirementsQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct; do not reorder.
    StructureType sType     = StructureType::eTileMemoryRequirementsQCOM;
    void *        pNext     = {};
    DeviceSize    size      = {};
    DeviceSize    alignment = {};
  };

  // Trait specializations mapping the C type / StructureType enumerant to this wrapper.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkTileMemoryRequirementsQCOM>
  {
    using Type = TileMemoryRequirementsQCOM;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eTileMemoryRequirementsQCOM>
  {
    using Type = TileMemoryRequirementsQCOM;
  };

  // wrapper struct for struct VkTileMemorySizeInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTileMemorySizeInfoQCOM.html
  struct TileMemorySizeInfoQCOM
  {
    using NativeType = VkTileMemorySizeInfoQCOM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eTileMemorySizeInfoQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and never passed in.
    VULKAN_HPP_CONSTEXPR TileMemorySizeInfoQCOM( DeviceSize size_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , size{ size_ }
    {
    }

    VULKAN_HPP_CONSTEXPR TileMemorySizeInfoQCOM( TileMemorySizeInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because the wrapper is layout-identical to
    // VkTileMemorySizeInfoQCOM (generator-enforced).
    TileMemorySizeInfoQCOM( VkTileMemorySizeInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : TileMemorySizeInfoQCOM( *reinterpret_cast<TileMemorySizeInfoQCOM const *>( &rhs ) )
    {
    }

    TileMemorySizeInfoQCOM & operator=( TileMemorySizeInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    TileMemorySizeInfoQCOM & operator=( VkTileMemorySizeInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<TileMemorySizeInfoQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (fluent) setters; sType intentionally has no setter.
    VULKAN_HPP_CONSTEXPR_14 TileMemorySizeInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TileMemorySizeInfoQCOM & setSize( DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct for direct use with the C API.
    operator VkTileMemorySizeInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkTileMemorySizeInfoQCOM *>( this );
    }

    operator VkTileMemorySizeInfoQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkTileMemorySizeInfoQCOM *>( this );
    }

    operator VkTileMemorySizeInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkTileMemorySizeInfoQCOM *>( this );
    }

    operator VkTileMemorySizeInfoQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkTileMemorySizeInfoQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used by operator==.
    std::tuple<StructureType const &, const void * const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, size );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( TileMemorySizeInfoQCOM const & ) const = default;
#else
    bool operator==( TileMemorySizeInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( size == rhs.size );
#  endif
    }

    bool operator!=( TileMemorySizeInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct; do not reorder.
    StructureType sType = StructureType::eTileMemorySizeInfoQCOM;
    const void *  pNext = {};
    DeviceSize    size  = {};
  };

  // Trait specializations mapping the C type / StructureType enumerant to this wrapper.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkTileMemorySizeInfoQCOM>
  {
    using Type = TileMemorySizeInfoQCOM;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eTileMemorySizeInfoQCOM>
  {
    using Type = TileMemorySizeInfoQCOM;
  };

  // wrapper struct for struct VkTilePropertiesQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTilePropertiesQCOM.html
  struct TilePropertiesQCOM
  {
    using NativeType = VkTilePropertiesQCOM;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eTilePropertiesQCOM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and never passed in.
    VULKAN_HPP_CONSTEXPR
      TilePropertiesQCOM( Extent3D tileSize_ = {}, Extent2D apronSize_ = {}, Offset2D origin_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , tileSize{ tileSize_ }
      , apronSize{ apronSize_ }
      , origin{ origin_ }
    {
    }

    VULKAN_HPP_CONSTEXPR TilePropertiesQCOM( TilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because the wrapper is layout-identical to
    // VkTilePropertiesQCOM (generator-enforced).
    TilePropertiesQCOM( VkTilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : TilePropertiesQCOM( *reinterpret_cast<TilePropertiesQCOM const *>( &rhs ) ) {}

    TilePropertiesQCOM & operator=( TilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    TilePropertiesQCOM & operator=( VkTilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<TilePropertiesQCOM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (fluent) setters; struct-typed members are taken by const reference.
    VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setTileSize( Extent3D const & tileSize_ ) VULKAN_HPP_NOEXCEPT
    {
      tileSize = tileSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setApronSize( Extent2D const & apronSize_ ) VULKAN_HPP_NOEXCEPT
    {
      apronSize = apronSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setOrigin( Offset2D const & origin_ ) VULKAN_HPP_NOEXCEPT
    {
      origin = origin_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct for direct use with the C API.
    operator VkTilePropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkTilePropertiesQCOM *>( this );
    }

    operator VkTilePropertiesQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkTilePropertiesQCOM *>( this );
    }

    operator VkTilePropertiesQCOM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkTilePropertiesQCOM *>( this );
    }

    operator VkTilePropertiesQCOM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkTilePropertiesQCOM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used by operator==.
    std::tuple<StructureType const &, void * const &, Extent3D const &, Extent2D const &, Offset2D const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, tileSize, apronSize, origin );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( TilePropertiesQCOM const & ) const = default;
#else
    bool operator==( TilePropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tileSize == rhs.tileSize ) && ( apronSize == rhs.apronSize ) && ( origin == rhs.origin );
#  endif
    }

    bool operator!=( TilePropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct; do not reorder.
    StructureType sType     = StructureType::eTilePropertiesQCOM;
    void *        pNext     = {};
    Extent3D      tileSize  = {};
    Extent2D      apronSize = {};
    Offset2D      origin    = {};
  };

  // Trait specializations mapping the C type / StructureType enumerant to this wrapper.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkTilePropertiesQCOM>
  {
    using Type = TilePropertiesQCOM;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eTilePropertiesQCOM>
  {
    using Type = TilePropertiesQCOM;
  };

  // wrapper struct for struct VkTimelineSemaphoreSubmitInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTimelineSemaphoreSubmitInfo.html
  struct TimelineSemaphoreSubmitInfo
  {
    using NativeType = VkTimelineSemaphoreSubmitInfo;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eTimelineSemaphoreSubmitInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor taking raw count/pointer pairs; the pointed-to arrays are NOT copied,
    // so the caller must keep them alive while this struct is in use.
    VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( uint32_t         waitSemaphoreValueCount_   = {},
                                                      const uint64_t * pWaitSemaphoreValues_      = {},
                                                      uint32_t         signalSemaphoreValueCount_ = {},
                                                      const uint64_t * pSignalSemaphoreValues_    = {},
                                                      const void *     pNext_                     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , waitSemaphoreValueCount{ waitSemaphoreValueCount_ }
      , pWaitSemaphoreValues{ pWaitSemaphoreValues_ }
      , signalSemaphoreValueCount{ signalSemaphoreValueCount_ }
      , pSignalSemaphoreValues{ pSignalSemaphoreValues_ }
    {
    }

    VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because the wrapper is layout-identical to
    // VkTimelineSemaphoreSubmitInfo (generator-enforced).
    TimelineSemaphoreSubmitInfo( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : TimelineSemaphoreSubmitInfo( *reinterpret_cast<TimelineSemaphoreSubmitInfo const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: count/pointer pairs are derived from the array proxies.
    // ArrayProxyNoTemporaries rejects rvalue arrays, but the referenced storage must still
    // outlive this struct.
    TimelineSemaphoreSubmitInfo( ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_,
                                 ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {},
                                 const void *                                    pNext_                 = nullptr )
      : pNext( pNext_ )
      , waitSemaphoreValueCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) )
      , pWaitSemaphoreValues( waitSemaphoreValues_.data() )
      , signalSemaphoreValueCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) )
      , pSignalSemaphoreValues( signalSemaphoreValues_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    TimelineSemaphoreSubmitInfo & operator=( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    TimelineSemaphoreSubmitInfo & operator=( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<TimelineSemaphoreSubmitInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable (fluent) setters; the array-proxy overloads set count and pointer together.
    VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setWaitSemaphoreValueCount( uint32_t waitSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreValueCount = waitSemaphoreValueCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
    {
      pWaitSemaphoreValues = pWaitSemaphoreValues_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    TimelineSemaphoreSubmitInfo & setWaitSemaphoreValues( ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreValueCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
      pWaitSemaphoreValues    = waitSemaphoreValues_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setSignalSemaphoreValueCount( uint32_t signalSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
    {
      signalSemaphoreValueCount = signalSemaphoreValueCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
    {
      pSignalSemaphoreValues = pSignalSemaphoreValues_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    TimelineSemaphoreSubmitInfo & setSignalSemaphoreValues( ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
    {
      signalSemaphoreValueCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
      pSignalSemaphoreValues    = signalSemaphoreValues_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct for direct use with the C API.
    operator VkTimelineSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkTimelineSemaphoreSubmitInfo *>( this );
    }

    operator VkTimelineSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkTimelineSemaphoreSubmitInfo *>( this );
    }

    operator VkTimelineSemaphoreSubmitInfo const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkTimelineSemaphoreSubmitInfo *>( this );
    }

    operator VkTimelineSemaphoreSubmitInfo *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkTimelineSemaphoreSubmitInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members; used by operator==.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const uint64_t * const &, uint32_t const &, const uint64_t * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, waitSemaphoreValueCount, pWaitSemaphoreValues, signalSemaphoreValueCount, pSignalSemaphoreValues );
    }
#endif

    // Memberwise equality; array pointers are compared by address, not by pointed-to contents.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( TimelineSemaphoreSubmitInfo const & ) const = default;
#else
    bool operator==( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreValueCount == rhs.waitSemaphoreValueCount ) &&
             ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) && ( signalSemaphoreValueCount == rhs.signalSemaphoreValueCount ) &&
             ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
#  endif
    }

    bool operator!=( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the C struct; do not reorder.
    StructureType    sType                     = StructureType::eTimelineSemaphoreSubmitInfo;
    const void *     pNext                     = {};
    uint32_t         waitSemaphoreValueCount   = {};
    const uint64_t * pWaitSemaphoreValues      = {};
    uint32_t         signalSemaphoreValueCount = {};
    const uint64_t * pSignalSemaphoreValues    = {};
  };

  // Trait specializations mapping the C type / StructureType enumerant to this wrapper.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType<VkTimelineSemaphoreSubmitInfo>
  {
    using Type = TimelineSemaphoreSubmitInfo;
  };
#endif

  template <>
  struct CppType<StructureType, StructureType::eTimelineSemaphoreSubmitInfo>
  {
    using Type = TimelineSemaphoreSubmitInfo;
  };

  // Alias for the identical structure promoted from VK_KHR_timeline_semaphore to core.
  using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo;

  // wrapper struct for struct VkTraceRaysIndirectCommand2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTraceRaysIndirectCommand2KHR.html
  struct TraceRaysIndirectCommand2KHR
  {
    using NativeType = VkTraceRaysIndirectCommand2KHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommand2KHR( DeviceAddress raygenShaderRecordAddress_         = {},
                                                       DeviceSize    raygenShaderRecordSize_            = {},
                                                       DeviceAddress missShaderBindingTableAddress_     = {},
                                                       DeviceSize    missShaderBindingTableSize_        = {},
                                                       DeviceSize    missShaderBindingTableStride_      = {},
                                                       DeviceAddress hitShaderBindingTableAddress_      = {},
                                                       DeviceSize    hitShaderBindingTableSize_         = {},
                                                       DeviceSize    hitShaderBindingTableStride_       = {},
                                                       DeviceAddress callableShaderBindingTableAddress_ = {},
                                                       DeviceSize    callableShaderBindingTableSize_    = {},
                                                       DeviceSize    callableShaderBindingTableStride_  = {},
                                                       uint32_t      width_                             = {},
                                                       uint32_t      height_                            = {},
                                                       uint32_t      depth_                             = {} ) VULKAN_HPP_NOEXCEPT
      : raygenShaderRecordAddress{ raygenShaderRecordAddress_ }
      , raygenShaderRecordSize{ raygenShaderRecordSize_ }
      , missShaderBindingTableAddress{ missShaderBindingTableAddress_ }
      , missShaderBindingTableSize{ missShaderBindingTableSize_ }
      , missShaderBindingTableStride{ missShaderBindingTableStride_ }
      , hitShaderBindingTableAddress{ hitShaderBindingTableAddress_ }
      , hitShaderBindingTableSize{ hitShaderBindingTableSize_ }
      , hitShaderBindingTableStride{ hitShaderBindingTableStride_ }
      , callableShaderBindingTableAddress{ callableShaderBindingTableAddress_ }
      , callableShaderBindingTableSize{ callableShaderBindingTableSize_ }
      , callableShaderBindingTableStride{ callableShaderBindingTableStride_ }
      , width{ width_ }
      , height{ height_ }
      , depth{ depth_ }
    {
    }

    VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommand2KHR( TraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    TraceRaysIndirectCommand2KHR( VkTraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : TraceRaysIndirectCommand2KHR( *reinterpret_cast<TraceRaysIndirectCommand2KHR const *>( &rhs ) )
    {
    }

    TraceRaysIndirectCommand2KHR & operator=( TraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    TraceRaysIndirectCommand2KHR & operator=( VkTraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<TraceRaysIndirectCommand2KHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setRaygenShaderRecordAddress( DeviceAddress raygenShaderRecordAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      raygenShaderRecordAddress = raygenShaderRecordAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setRaygenShaderRecordSize( DeviceSize raygenShaderRecordSize_ ) VULKAN_HPP_NOEXCEPT
    {
      raygenShaderRecordSize = raygenShaderRecordSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setMissShaderBindingTableAddress( DeviceAddress missShaderBindingTableAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      missShaderBindingTableAddress = missShaderBindingTableAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setMissShaderBindingTableSize( DeviceSize missShaderBindingTableSize_ ) VULKAN_HPP_NOEXCEPT
    {
      missShaderBindingTableSize = missShaderBindingTableSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setMissShaderBindingTableStride( DeviceSize missShaderBindingTableStride_ ) VULKAN_HPP_NOEXCEPT
    {
      missShaderBindingTableStride = missShaderBindingTableStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setHitShaderBindingTableAddress( DeviceAddress hitShaderBindingTableAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      hitShaderBindingTableAddress = hitShaderBindingTableAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setHitShaderBindingTableSize( DeviceSize hitShaderBindingTableSize_ ) VULKAN_HPP_NOEXCEPT
    {
      hitShaderBindingTableSize = hitShaderBindingTableSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setHitShaderBindingTableStride( DeviceSize hitShaderBindingTableStride_ ) VULKAN_HPP_NOEXCEPT
    {
      hitShaderBindingTableStride = hitShaderBindingTableStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR &
      setCallableShaderBindingTableAddress( DeviceAddress callableShaderBindingTableAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      callableShaderBindingTableAddress = callableShaderBindingTableAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setCallableShaderBindingTableSize( DeviceSize callableShaderBindingTableSize_ ) VULKAN_HPP_NOEXCEPT
    {
      callableShaderBindingTableSize = callableShaderBindingTableSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR &
      setCallableShaderBindingTableStride( DeviceSize callableShaderBindingTableStride_ ) VULKAN_HPP_NOEXCEPT
    {
      callableShaderBindingTableStride = callableShaderBindingTableStride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
    {
      width = width_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
    {
      height = height_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
    {
      depth = depth_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkTraceRaysIndirectCommand2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkTraceRaysIndirectCommand2KHR *>( this );
    }

    operator VkTraceRaysIndirectCommand2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkTraceRaysIndirectCommand2KHR *>( this );
    }

    operator VkTraceRaysIndirectCommand2KHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkTraceRaysIndirectCommand2KHR *>( this );
    }

    operator VkTraceRaysIndirectCommand2KHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkTraceRaysIndirectCommand2KHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<DeviceAddress const &,
               DeviceSize const &,
               DeviceAddress const &,
               DeviceSize const &,
               DeviceSize const &,
               DeviceAddress const &,
               DeviceSize const &,
               DeviceSize const &,
               DeviceAddress const &,
               DeviceSize const &,
               DeviceSize const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( raygenShaderRecordAddress,
                       raygenShaderRecordSize,
                       missShaderBindingTableAddress,
                       missShaderBindingTableSize,
                       missShaderBindingTableStride,
                       hitShaderBindingTableAddress,
                       hitShaderBindingTableSize,
                       hitShaderBindingTableStride,
                       callableShaderBindingTableAddress,
                       callableShaderBindingTableSize,
                       callableShaderBindingTableStride,
                       width,
                       height,
                       depth );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( TraceRaysIndirectCommand2KHR const & ) const = default;
#else
    bool operator==( TraceRaysIndirectCommand2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( raygenShaderRecordAddress == rhs.raygenShaderRecordAddress ) && ( raygenShaderRecordSize == rhs.raygenShaderRecordSize ) &&
             ( missShaderBindingTableAddress == rhs.missShaderBindingTableAddress ) && ( missShaderBindingTableSize == rhs.missShaderBindingTableSize ) &&
             ( missShaderBindingTableStride == rhs.missShaderBindingTableStride ) && ( hitShaderBindingTableAddress == rhs.hitShaderBindingTableAddress ) &&
             ( hitShaderBindingTableSize == rhs.hitShaderBindingTableSize ) && ( hitShaderBindingTableStride == rhs.hitShaderBindingTableStride ) &&
             ( callableShaderBindingTableAddress == rhs.callableShaderBindingTableAddress ) &&
             ( callableShaderBindingTableSize == rhs.callableShaderBindingTableSize ) &&
             ( callableShaderBindingTableStride == rhs.callableShaderBindingTableStride ) && ( width == rhs.width ) && ( height == rhs.height ) &&
             ( depth == rhs.depth );
#  endif
    }

    bool operator!=( TraceRaysIndirectCommand2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    DeviceAddress raygenShaderRecordAddress         = {};
    DeviceSize    raygenShaderRecordSize            = {};
    DeviceAddress missShaderBindingTableAddress     = {};
    DeviceSize    missShaderBindingTableSize        = {};
    DeviceSize    missShaderBindingTableStride      = {};
    DeviceAddress hitShaderBindingTableAddress      = {};
    DeviceSize    hitShaderBindingTableSize         = {};
    DeviceSize    hitShaderBindingTableStride       = {};
    DeviceAddress callableShaderBindingTableAddress = {};
    DeviceSize    callableShaderBindingTableSize    = {};
    DeviceSize    callableShaderBindingTableStride  = {};
    uint32_t      width                             = {};
    uint32_t      height                            = {};
    uint32_t      depth                             = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkTraceRaysIndirectCommand2KHR type to its C++ wrapper, so generic code can recover the wrapper via CppType<T>::Type.
  template <>
  struct CppType<VkTraceRaysIndirectCommand2KHR>
  {
    using Type = TraceRaysIndirectCommand2KHR;
  };
#endif

  // wrapper struct for struct VkTraceRaysIndirectCommandKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTraceRaysIndirectCommandKHR.html
  struct TraceRaysIndirectCommandKHR
  {
    using NativeType = VkTraceRaysIndirectCommandKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; all dimensions default to zero.
    VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT
      : width{ width_ }
      , height{ height_ }
      , depth{ depth_ }
    {
    }

    VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the reinterpret_cast relies on this wrapper being layout-compatible with VkTraceRaysIndirectCommandKHR.
    TraceRaysIndirectCommandKHR( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : TraceRaysIndirectCommandKHR( *reinterpret_cast<TraceRaysIndirectCommandKHR const *>( &rhs ) )
    {
    }

    // Convenience constructor: width/height taken from an Extent2D, depth passed separately (defaults to zero).
    explicit TraceRaysIndirectCommandKHR( Extent2D const & extent2D, uint32_t depth_ = {} )
      : width( extent2D.width ), height( extent2D.height ), depth( depth_ )
    {
    }

    TraceRaysIndirectCommandKHR & operator=( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible, see the converting constructor above).
    TraceRaysIndirectCommandKHR & operator=( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<TraceRaysIndirectCommandKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
    {
      width = width_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
    {
      height = height_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
    {
      depth = depth_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference/pointer conversions to the native C struct, so this wrapper can be passed straight to the C API.
    operator VkTraceRaysIndirectCommandKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkTraceRaysIndirectCommandKHR *>( this );
    }

    operator VkTraceRaysIndirectCommandKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkTraceRaysIndirectCommandKHR *>( this );
    }

    operator VkTraceRaysIndirectCommandKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkTraceRaysIndirectCommandKHR *>( this );
    }

    operator VkTraceRaysIndirectCommandKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkTraceRaysIndirectCommandKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( width, height, depth );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( TraceRaysIndirectCommandKHR const & ) const = default;
#else
    // Member-wise equality fallback for builds without operator<=>.
    bool operator==( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( width == rhs.width ) && ( height == rhs.height ) && ( depth == rhs.depth );
#  endif
    }

    bool operator!=( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Dispatch dimensions of the indirect trace-rays call; member order mirrors VkTraceRaysIndirectCommandKHR exactly.
    uint32_t width  = {};
    uint32_t height = {};
    uint32_t depth  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkTraceRaysIndirectCommandKHR type to its C++ wrapper, so generic code can recover the wrapper via CppType<T>::Type.
  template <>
  struct CppType<VkTraceRaysIndirectCommandKHR>
  {
    using Type = TraceRaysIndirectCommandKHR;
  };
#endif

  // wrapper struct for struct VkValidationCacheCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkValidationCacheCreateInfoEXT.html
  struct ValidationCacheCreateInfoEXT
  {
    using NativeType = VkValidationCacheCreateInfoEXT;

    // allowDuplicate: this struct may not appear more than once in a pNext chain; structureType: the sType value it carries.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eValidationCacheCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pInitialData is stored as-is (borrowed), not copied.
    VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( ValidationCacheCreateFlagsEXT flags_           = {},
                                                       size_t                        initialDataSize_ = {},
                                                       const void *                  pInitialData_    = {},
                                                       const void *                  pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , initialDataSize{ initialDataSize_ }
      , pInitialData{ pInitialData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the reinterpret_cast relies on layout compatibility with VkValidationCacheCreateInfoEXT.
    ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ValidationCacheCreateInfoEXT( *reinterpret_cast<ValidationCacheCreateInfoEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives initialDataSize (in bytes) and pInitialData from an array proxy of T; the proxy's data pointer is borrowed.
    template <typename T>
    ValidationCacheCreateInfoEXT( ValidationCacheCreateFlagsEXT flags_, ArrayProxyNoTemporaries<const T> const & initialData_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), flags( flags_ ), initialDataSize( initialData_.size() * sizeof( T ) ), pInitialData( initialData_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    ValidationCacheCreateInfoEXT & operator=( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible, see the converting constructor above).
    ValidationCacheCreateInfoEXT & operator=( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ValidationCacheCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setFlags( ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
    {
      initialDataSize = initialDataSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT
    {
      pInitialData = pInitialData_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets initialDataSize (in bytes) and pInitialData together from an array proxy of T, keeping the two members consistent.
    template <typename T>
    ValidationCacheCreateInfoEXT & setInitialData( ArrayProxyNoTemporaries<const T> const & initialData_ ) VULKAN_HPP_NOEXCEPT
    {
      initialDataSize = initialData_.size() * sizeof( T );
      pInitialData    = initialData_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference/pointer conversions to the native C struct, so this wrapper can be passed straight to the C API.
    operator VkValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( this );
    }

    operator VkValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkValidationCacheCreateInfoEXT *>( this );
    }

    operator VkValidationCacheCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( this );
    }

    operator VkValidationCacheCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkValidationCacheCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, ValidationCacheCreateFlagsEXT const &, size_t const &, const void * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, initialDataSize, pInitialData );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ValidationCacheCreateInfoEXT const & ) const = default;
#else
    // Member-wise equality fallback; note that pNext and pInitialData are compared by pointer value, not by pointed-to contents.
    bool operator==( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( initialDataSize == rhs.initialDataSize ) &&
             ( pInitialData == rhs.pInitialData );
#  endif
    }

    bool operator!=( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkValidationCacheCreateInfoEXT exactly; pInitialData is initialDataSize bytes of previously retrieved cache data.
    StructureType                 sType           = StructureType::eValidationCacheCreateInfoEXT;
    const void *                  pNext           = {};
    ValidationCacheCreateFlagsEXT flags           = {};
    size_t                        initialDataSize = {};
    const void *                  pInitialData    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkValidationCacheCreateInfoEXT type to its C++ wrapper, so generic code can recover the wrapper via CppType<T>::Type.
  template <>
  struct CppType<VkValidationCacheCreateInfoEXT>
  {
    using Type = ValidationCacheCreateInfoEXT;
  };
#endif

  // Maps the sType value eValidationCacheCreateInfoEXT back to its C++ struct type, letting generic code recover the struct from an sType.
  template <>
  struct CppType<StructureType, StructureType::eValidationCacheCreateInfoEXT>
  {
    using Type = ValidationCacheCreateInfoEXT;
  };

  // wrapper struct for struct VkValidationFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkValidationFeaturesEXT.html
  struct ValidationFeaturesEXT
  {
    using NativeType = VkValidationFeaturesEXT;

    // allowDuplicate: this struct may not appear more than once in a pNext chain; structureType: the sType value it carries.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eValidationFeaturesEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; the feature arrays are stored as-is (borrowed), not copied.
    VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( uint32_t                            enabledValidationFeatureCount_  = {},
                                                const ValidationFeatureEnableEXT *  pEnabledValidationFeatures_     = {},
                                                uint32_t                            disabledValidationFeatureCount_ = {},
                                                const ValidationFeatureDisableEXT * pDisabledValidationFeatures_    = {},
                                                const void *                        pNext_                          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , enabledValidationFeatureCount{ enabledValidationFeatureCount_ }
      , pEnabledValidationFeatures{ pEnabledValidationFeatures_ }
      , disabledValidationFeatureCount{ disabledValidationFeatureCount_ }
      , pDisabledValidationFeatures{ pDisabledValidationFeatures_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the reinterpret_cast relies on layout compatibility with VkValidationFeaturesEXT.
    ValidationFeaturesEXT( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ValidationFeaturesEXT( *reinterpret_cast<ValidationFeaturesEXT const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives each count/pointer pair from an array proxy; the proxies' data pointers are borrowed.
    ValidationFeaturesEXT( ArrayProxyNoTemporaries<const ValidationFeatureEnableEXT> const &  enabledValidationFeatures_,
                           ArrayProxyNoTemporaries<const ValidationFeatureDisableEXT> const & disabledValidationFeatures_ = {},
                           const void *                                                       pNext_                      = nullptr )
      : pNext( pNext_ )
      , enabledValidationFeatureCount( static_cast<uint32_t>( enabledValidationFeatures_.size() ) )
      , pEnabledValidationFeatures( enabledValidationFeatures_.data() )
      , disabledValidationFeatureCount( static_cast<uint32_t>( disabledValidationFeatures_.size() ) )
      , pDisabledValidationFeatures( disabledValidationFeatures_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    ValidationFeaturesEXT & operator=( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible, see the converting constructor above).
    ValidationFeaturesEXT & operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ValidationFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setEnabledValidationFeatureCount( uint32_t enabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledValidationFeatureCount = enabledValidationFeatureCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT &
      setPEnabledValidationFeatures( const ValidationFeatureEnableEXT * pEnabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
    {
      pEnabledValidationFeatures = pEnabledValidationFeatures_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets enabledValidationFeatureCount and pEnabledValidationFeatures together from an array proxy, keeping the pair consistent.
    ValidationFeaturesEXT &
      setEnabledValidationFeatures( ArrayProxyNoTemporaries<const ValidationFeatureEnableEXT> const & enabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledValidationFeatureCount = static_cast<uint32_t>( enabledValidationFeatures_.size() );
      pEnabledValidationFeatures    = enabledValidationFeatures_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setDisabledValidationFeatureCount( uint32_t disabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
    {
      disabledValidationFeatureCount = disabledValidationFeatureCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT &
      setPDisabledValidationFeatures( const ValidationFeatureDisableEXT * pDisabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
    {
      pDisabledValidationFeatures = pDisabledValidationFeatures_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets disabledValidationFeatureCount and pDisabledValidationFeatures together from an array proxy, keeping the pair consistent.
    ValidationFeaturesEXT &
      setDisabledValidationFeatures( ArrayProxyNoTemporaries<const ValidationFeatureDisableEXT> const & disabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
    {
      disabledValidationFeatureCount = static_cast<uint32_t>( disabledValidationFeatures_.size() );
      pDisabledValidationFeatures    = disabledValidationFeatures_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference/pointer conversions to the native C struct, so this wrapper can be passed straight to the C API.
    operator VkValidationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkValidationFeaturesEXT *>( this );
    }

    operator VkValidationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkValidationFeaturesEXT *>( this );
    }

    operator VkValidationFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkValidationFeaturesEXT *>( this );
    }

    operator VkValidationFeaturesEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkValidationFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<StructureType const &,
               const void * const &,
               uint32_t const &,
               const ValidationFeatureEnableEXT * const &,
               uint32_t const &,
               const ValidationFeatureDisableEXT * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, enabledValidationFeatureCount, pEnabledValidationFeatures, disabledValidationFeatureCount, pDisabledValidationFeatures );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ValidationFeaturesEXT const & ) const = default;
#else
    // Member-wise equality fallback; note that the array pointers are compared by pointer value, not by pointed-to contents.
    bool operator==( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( enabledValidationFeatureCount == rhs.enabledValidationFeatureCount ) &&
             ( pEnabledValidationFeatures == rhs.pEnabledValidationFeatures ) && ( disabledValidationFeatureCount == rhs.disabledValidationFeatureCount ) &&
             ( pDisabledValidationFeatures == rhs.pDisabledValidationFeatures );
#  endif
    }

    bool operator!=( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkValidationFeaturesEXT exactly; each pointer refers to an array of the length given by its count member.
    StructureType                       sType                          = StructureType::eValidationFeaturesEXT;
    const void *                        pNext                          = {};
    uint32_t                            enabledValidationFeatureCount  = {};
    const ValidationFeatureEnableEXT *  pEnabledValidationFeatures     = {};
    uint32_t                            disabledValidationFeatureCount = {};
    const ValidationFeatureDisableEXT * pDisabledValidationFeatures    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkValidationFeaturesEXT type to its C++ wrapper, so generic code can recover the wrapper via CppType<T>::Type.
  template <>
  struct CppType<VkValidationFeaturesEXT>
  {
    using Type = ValidationFeaturesEXT;
  };
#endif

  // Maps the sType value eValidationFeaturesEXT back to its C++ struct type, letting generic code recover the struct from an sType.
  template <>
  struct CppType<StructureType, StructureType::eValidationFeaturesEXT>
  {
    using Type = ValidationFeaturesEXT;
  };

  // wrapper struct for struct VkValidationFlagsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkValidationFlagsEXT.html
  struct ValidationFlagsEXT
  {
    using NativeType = VkValidationFlagsEXT;

    // allowDuplicate: this struct may not appear more than once in a pNext chain; structureType: the sType value it carries.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eValidationFlagsEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; the checks array is stored as-is (borrowed), not copied.
    VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( uint32_t                   disabledValidationCheckCount_ = {},
                                             const ValidationCheckEXT * pDisabledValidationChecks_    = {},
                                             const void *               pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , disabledValidationCheckCount{ disabledValidationCheckCount_ }
      , pDisabledValidationChecks{ pDisabledValidationChecks_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the reinterpret_cast relies on layout compatibility with VkValidationFlagsEXT.
    ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ValidationFlagsEXT( *reinterpret_cast<ValidationFlagsEXT const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives the count/pointer pair from an array proxy; the proxy's data pointer is borrowed.
    ValidationFlagsEXT( ArrayProxyNoTemporaries<const ValidationCheckEXT> const & disabledValidationChecks_, const void * pNext_ = nullptr )
      : pNext( pNext_ )
      , disabledValidationCheckCount( static_cast<uint32_t>( disabledValidationChecks_.size() ) )
      , pDisabledValidationChecks( disabledValidationChecks_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    ValidationFlagsEXT & operator=( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible, see the converting constructor above).
    ValidationFlagsEXT & operator=( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ValidationFlagsEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ ) VULKAN_HPP_NOEXCEPT
    {
      disabledValidationCheckCount = disabledValidationCheckCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & setPDisabledValidationChecks( const ValidationCheckEXT * pDisabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
    {
      pDisabledValidationChecks = pDisabledValidationChecks_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets disabledValidationCheckCount and pDisabledValidationChecks together from an array proxy, keeping the pair consistent.
    ValidationFlagsEXT & setDisabledValidationChecks( ArrayProxyNoTemporaries<const ValidationCheckEXT> const & disabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
    {
      disabledValidationCheckCount = static_cast<uint32_t>( disabledValidationChecks_.size() );
      pDisabledValidationChecks    = disabledValidationChecks_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit reference/pointer conversions to the native C struct, so this wrapper can be passed straight to the C API.
    operator VkValidationFlagsEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkValidationFlagsEXT *>( this );
    }

    operator VkValidationFlagsEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkValidationFlagsEXT *>( this );
    }

    operator VkValidationFlagsEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkValidationFlagsEXT *>( this );
    }

    operator VkValidationFlagsEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkValidationFlagsEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, used for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const ValidationCheckEXT * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, disabledValidationCheckCount, pDisabledValidationChecks );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ValidationFlagsEXT const & ) const = default;
#else
    // Member-wise equality fallback; note that pDisabledValidationChecks is compared by pointer value, not by pointed-to contents.
    bool operator==( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount ) &&
             ( pDisabledValidationChecks == rhs.pDisabledValidationChecks );
#  endif
    }

    bool operator!=( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkValidationFlagsEXT exactly; pDisabledValidationChecks points to disabledValidationCheckCount elements.
    StructureType              sType                        = StructureType::eValidationFlagsEXT;
    const void *               pNext                        = {};
    uint32_t                   disabledValidationCheckCount = {};
    const ValidationCheckEXT * pDisabledValidationChecks    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkValidationFlagsEXT type to its C++ wrapper, so generic code can recover the wrapper via CppType<T>::Type.
  template <>
  struct CppType<VkValidationFlagsEXT>
  {
    using Type = ValidationFlagsEXT;
  };
#endif

  // Maps the sType value eValidationFlagsEXT back to its C++ struct type, letting generic code recover the struct from an sType.
  template <>
  struct CppType<StructureType, StructureType::eValidationFlagsEXT>
  {
    using Type = ValidationFlagsEXT;
  };

  // wrapper struct for struct VkVertexInputAttributeDescription2EXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVertexInputAttributeDescription2EXT.html
  struct VertexInputAttributeDescription2EXT
  {
    // The C struct this wrapper is layout-compatible with.
    using NativeType = VkVertexInputAttributeDescription2EXT;

    // NOTE(review): allowDuplicate presumably indicates whether this struct may occur more than once in a pNext chain — confirm against StructureChain usage.
    static const bool                                  allowDuplicate = false;
    // The sType value this struct must carry.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVertexInputAttributeDescription2EXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all fields defaulted. sType is fixed by its in-class initializer and is not settable here.
    VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT(
      uint32_t location_ = {}, uint32_t binding_ = {}, Format format_ = Format::eUndefined, uint32_t offset_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , location{ location_ }
      , binding{ binding_ }
      , format{ format_ }
      , offset{ offset_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT( VertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-compatible with VkVertexInputAttributeDescription2EXT.
    VertexInputAttributeDescription2EXT( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VertexInputAttributeDescription2EXT( *reinterpret_cast<VertexInputAttributeDescription2EXT const *>( &rhs ) )
    {
    }

    VertexInputAttributeDescription2EXT & operator=( VertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (available even when wrapper constructors are disabled).
    VertexInputAttributeDescription2EXT & operator=( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VertexInputAttributeDescription2EXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT
    {
      location = location_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
    {
      binding = binding_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setFormat( Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vk type (reference and pointer, const and non-const);
    // valid only because the wrapper shares the C struct's layout.
    operator VkVertexInputAttributeDescription2EXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( this );
    }

    operator VkVertexInputAttributeDescription2EXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVertexInputAttributeDescription2EXT *>( this );
    }

    operator VkVertexInputAttributeDescription2EXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( this );
    }

    operator VkVertexInputAttributeDescription2EXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVertexInputAttributeDescription2EXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to every member, used below for reflection-based comparison.
    std::tuple<StructureType const &, void * const &, uint32_t const &, uint32_t const &, Format const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, location, binding, format, offset );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VertexInputAttributeDescription2EXT const & ) const = default;
#else
    // Memberwise equality; pNext is compared by pointer value, not by chain contents.
    bool operator==( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( location == rhs.location ) && ( binding == rhs.binding ) && ( format == rhs.format ) &&
             ( offset == rhs.offset );
#  endif
    }

    bool operator!=( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct's layout exactly; the reinterpret_cast conversions above depend on this ordering.
    StructureType sType    = StructureType::eVertexInputAttributeDescription2EXT;
    void *        pNext    = {};
    uint32_t      location = {};
    uint32_t      binding  = {};
    Format        format   = Format::eUndefined;
    uint32_t      offset   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkVertexInputAttributeDescription2EXT to its C++ wrapper (C++20 builds only).
  template <>
  struct CppType<VkVertexInputAttributeDescription2EXT>
  {
    using Type = VertexInputAttributeDescription2EXT;
  };
#endif

  // Maps StructureType::eVertexInputAttributeDescription2EXT to its wrapper struct, enabling sType-based type lookup.
  template <>
  struct CppType<StructureType, StructureType::eVertexInputAttributeDescription2EXT>
  {
    using Type = VertexInputAttributeDescription2EXT;
  };

  // wrapper struct for struct VkVertexInputBindingDescription2EXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVertexInputBindingDescription2EXT.html
  struct VertexInputBindingDescription2EXT
  {
    // The C struct this wrapper is layout-compatible with.
    using NativeType = VkVertexInputBindingDescription2EXT;

    // NOTE(review): allowDuplicate presumably indicates whether this struct may occur more than once in a pNext chain — confirm against StructureChain usage.
    static const bool                                  allowDuplicate = false;
    // The sType value this struct must carry.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVertexInputBindingDescription2EXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all fields defaulted. sType is fixed by its in-class initializer and is not settable here.
    VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT( uint32_t        binding_   = {},
                                                            uint32_t        stride_    = {},
                                                            VertexInputRate inputRate_ = VertexInputRate::eVertex,
                                                            uint32_t        divisor_   = {},
                                                            void *          pNext_     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , binding{ binding_ }
      , stride{ stride_ }
      , inputRate{ inputRate_ }
      , divisor{ divisor_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-compatible with VkVertexInputBindingDescription2EXT.
    VertexInputBindingDescription2EXT( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VertexInputBindingDescription2EXT( *reinterpret_cast<VertexInputBindingDescription2EXT const *>( &rhs ) )
    {
    }

    VertexInputBindingDescription2EXT & operator=( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (available even when wrapper constructors are disabled).
    VertexInputBindingDescription2EXT & operator=( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VertexInputBindingDescription2EXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
    {
      binding = binding_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
    {
      stride = stride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setInputRate( VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT
    {
      inputRate = inputRate_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT
    {
      divisor = divisor_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vk type (reference and pointer, const and non-const);
    // valid only because the wrapper shares the C struct's layout.
    operator VkVertexInputBindingDescription2EXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( this );
    }

    operator VkVertexInputBindingDescription2EXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVertexInputBindingDescription2EXT *>( this );
    }

    operator VkVertexInputBindingDescription2EXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( this );
    }

    operator VkVertexInputBindingDescription2EXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVertexInputBindingDescription2EXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to every member, used below for reflection-based comparison.
    std::tuple<StructureType const &, void * const &, uint32_t const &, uint32_t const &, VertexInputRate const &, uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, binding, stride, inputRate, divisor );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VertexInputBindingDescription2EXT const & ) const = default;
#else
    // Memberwise equality; pNext is compared by pointer value, not by chain contents.
    bool operator==( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( binding == rhs.binding ) && ( stride == rhs.stride ) && ( inputRate == rhs.inputRate ) &&
             ( divisor == rhs.divisor );
#  endif
    }

    bool operator!=( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct's layout exactly; the reinterpret_cast conversions above depend on this ordering.
    StructureType   sType     = StructureType::eVertexInputBindingDescription2EXT;
    void *          pNext     = {};
    uint32_t        binding   = {};
    uint32_t        stride    = {};
    VertexInputRate inputRate = VertexInputRate::eVertex;
    uint32_t        divisor   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkVertexInputBindingDescription2EXT to its C++ wrapper (C++20 builds only).
  template <>
  struct CppType<VkVertexInputBindingDescription2EXT>
  {
    using Type = VertexInputBindingDescription2EXT;
  };
#endif

  // Maps StructureType::eVertexInputBindingDescription2EXT to its wrapper struct, enabling sType-based type lookup.
  template <>
  struct CppType<StructureType, StructureType::eVertexInputBindingDescription2EXT>
  {
    using Type = VertexInputBindingDescription2EXT;
  };

#if defined( VK_USE_PLATFORM_VI_NN )
  // wrapper struct for struct VkViSurfaceCreateInfoNN, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkViSurfaceCreateInfoNN.html
  // Only compiled when the Nintendo Vi platform is enabled via VK_USE_PLATFORM_VI_NN.
  struct ViSurfaceCreateInfoNN
  {
    // The C struct this wrapper is layout-compatible with.
    using NativeType = VkViSurfaceCreateInfoNN;

    // NOTE(review): allowDuplicate presumably indicates whether this struct may occur more than once in a pNext chain — confirm against StructureChain usage.
    static const bool                                  allowDuplicate = false;
    // The sType value this struct must carry.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eViSurfaceCreateInfoNN;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all fields defaulted. window is an opaque platform handle, held as void *.
    VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( ViSurfaceCreateFlagsNN flags_ = {}, void * window_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , window{ window_ }
    {
    }

    VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-compatible with VkViSurfaceCreateInfoNN.
    ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
      : ViSurfaceCreateInfoNN( *reinterpret_cast<ViSurfaceCreateInfoNN const *>( &rhs ) )
    {
    }

    ViSurfaceCreateInfoNN & operator=( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (available even when wrapper constructors are disabled).
    ViSurfaceCreateInfoNN & operator=( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<ViSurfaceCreateInfoNN const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setFlags( ViSurfaceCreateFlagsNN flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setWindow( void * window_ ) VULKAN_HPP_NOEXCEPT
    {
      window = window_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vk type (reference and pointer, const and non-const);
    // valid only because the wrapper shares the C struct's layout.
    operator VkViSurfaceCreateInfoNN const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkViSurfaceCreateInfoNN *>( this );
    }

    operator VkViSurfaceCreateInfoNN &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkViSurfaceCreateInfoNN *>( this );
    }

    operator VkViSurfaceCreateInfoNN const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkViSurfaceCreateInfoNN *>( this );
    }

    operator VkViSurfaceCreateInfoNN *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkViSurfaceCreateInfoNN *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to every member, used below for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, ViSurfaceCreateFlagsNN const &, void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, window );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ViSurfaceCreateInfoNN const & ) const = default;
#  else
    // Memberwise equality; pNext and window are compared by pointer value, not by pointed-to contents.
    bool operator==( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( window == rhs.window );
#    endif
    }

    bool operator!=( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Members mirror the C struct's layout exactly; the reinterpret_cast conversions above depend on this ordering.
    StructureType          sType  = StructureType::eViSurfaceCreateInfoNN;
    const void *           pNext  = {};
    ViSurfaceCreateFlagsNN flags  = {};
    void *                 window = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkViSurfaceCreateInfoNN to its C++ wrapper (C++20 builds only).
  template <>
  struct CppType<VkViSurfaceCreateInfoNN>
  {
    using Type = ViSurfaceCreateInfoNN;
  };
#  endif

  // Maps StructureType::eViSurfaceCreateInfoNN to its wrapper struct, enabling sType-based type lookup.
  template <>
  struct CppType<StructureType, StructureType::eViSurfaceCreateInfoNN>
  {
    using Type = ViSurfaceCreateInfoNN;
  };
#endif /*VK_USE_PLATFORM_VI_NN*/

  // wrapper struct for struct VkVideoPictureResourceInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoPictureResourceInfoKHR.html
  struct VideoPictureResourceInfoKHR
  {
    // The C struct this wrapper is layout-compatible with.
    using NativeType = VkVideoPictureResourceInfoKHR;

    // NOTE(review): allowDuplicate presumably indicates whether this struct may occur more than once in a pNext chain — confirm against StructureChain usage.
    static const bool                                  allowDuplicate = false;
    // The sType value this struct must carry.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoPictureResourceInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all fields defaulted. sType is fixed by its in-class initializer and is not settable here.
    VULKAN_HPP_CONSTEXPR VideoPictureResourceInfoKHR( Offset2D     codedOffset_      = {},
                                                      Extent2D     codedExtent_      = {},
                                                      uint32_t     baseArrayLayer_   = {},
                                                      ImageView    imageViewBinding_ = {},
                                                      const void * pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , codedOffset{ codedOffset_ }
      , codedExtent{ codedExtent_ }
      , baseArrayLayer{ baseArrayLayer_ }
      , imageViewBinding{ imageViewBinding_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoPictureResourceInfoKHR( VideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-compatible with VkVideoPictureResourceInfoKHR.
    VideoPictureResourceInfoKHR( VkVideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoPictureResourceInfoKHR( *reinterpret_cast<VideoPictureResourceInfoKHR const *>( &rhs ) )
    {
    }

    VideoPictureResourceInfoKHR & operator=( VideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (available even when wrapper constructors are disabled).
    VideoPictureResourceInfoKHR & operator=( VkVideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoPictureResourceInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained. Struct-typed fields are passed by const reference.
    VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setCodedOffset( Offset2D const & codedOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      codedOffset = codedOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setCodedExtent( Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT
    {
      codedExtent = codedExtent_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
    {
      baseArrayLayer = baseArrayLayer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setImageViewBinding( ImageView imageViewBinding_ ) VULKAN_HPP_NOEXCEPT
    {
      imageViewBinding = imageViewBinding_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vk type (reference and pointer, const and non-const);
    // valid only because the wrapper shares the C struct's layout.
    operator VkVideoPictureResourceInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoPictureResourceInfoKHR *>( this );
    }

    operator VkVideoPictureResourceInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoPictureResourceInfoKHR *>( this );
    }

    operator VkVideoPictureResourceInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoPictureResourceInfoKHR *>( this );
    }

    operator VkVideoPictureResourceInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoPictureResourceInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to every member, used below for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, Offset2D const &, Extent2D const &, uint32_t const &, ImageView const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, codedOffset, codedExtent, baseArrayLayer, imageViewBinding );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoPictureResourceInfoKHR const & ) const = default;
#else
    // Memberwise equality; pNext is compared by pointer value, not by chain contents.
    bool operator==( VideoPictureResourceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( codedOffset == rhs.codedOffset ) && ( codedExtent == rhs.codedExtent ) &&
             ( baseArrayLayer == rhs.baseArrayLayer ) && ( imageViewBinding == rhs.imageViewBinding );
#  endif
    }

    bool operator!=( VideoPictureResourceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct's layout exactly; the reinterpret_cast conversions above depend on this ordering.
    StructureType sType            = StructureType::eVideoPictureResourceInfoKHR;
    const void *  pNext            = {};
    Offset2D      codedOffset      = {};
    Extent2D      codedExtent      = {};
    uint32_t      baseArrayLayer   = {};
    ImageView     imageViewBinding = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkVideoPictureResourceInfoKHR to its C++ wrapper (C++20 builds only).
  template <>
  struct CppType<VkVideoPictureResourceInfoKHR>
  {
    using Type = VideoPictureResourceInfoKHR;
  };
#endif

  // Maps StructureType::eVideoPictureResourceInfoKHR to its wrapper struct, enabling sType-based type lookup.
  template <>
  struct CppType<StructureType, StructureType::eVideoPictureResourceInfoKHR>
  {
    using Type = VideoPictureResourceInfoKHR;
  };

  // wrapper struct for struct VkVideoReferenceSlotInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoReferenceSlotInfoKHR.html
  struct VideoReferenceSlotInfoKHR
  {
    // The C struct this wrapper is layout-compatible with.
    using NativeType = VkVideoReferenceSlotInfoKHR;

    // NOTE(review): allowDuplicate presumably indicates whether this struct may occur more than once in a pNext chain — confirm against StructureChain usage.
    static const bool                                  allowDuplicate = false;
    // The sType value this struct must carry.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoReferenceSlotInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all fields defaulted. Note slotIndex is signed (int32_t).
    VULKAN_HPP_CONSTEXPR VideoReferenceSlotInfoKHR( int32_t                             slotIndex_        = {},
                                                    const VideoPictureResourceInfoKHR * pPictureResource_ = {},
                                                    const void *                        pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , slotIndex{ slotIndex_ }
      , pPictureResource{ pPictureResource_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoReferenceSlotInfoKHR( VideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-compatible with VkVideoReferenceSlotInfoKHR.
    VideoReferenceSlotInfoKHR( VkVideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoReferenceSlotInfoKHR( *reinterpret_cast<VideoReferenceSlotInfoKHR const *>( &rhs ) )
    {
    }

    VideoReferenceSlotInfoKHR & operator=( VideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (available even when wrapper constructors are disabled).
    VideoReferenceSlotInfoKHR & operator=( VkVideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoReferenceSlotInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR & setSlotIndex( int32_t slotIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      slotIndex = slotIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR & setPPictureResource( const VideoPictureResourceInfoKHR * pPictureResource_ ) VULKAN_HPP_NOEXCEPT
    {
      pPictureResource = pPictureResource_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vk type (reference and pointer, const and non-const);
    // valid only because the wrapper shares the C struct's layout.
    operator VkVideoReferenceSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoReferenceSlotInfoKHR *>( this );
    }

    operator VkVideoReferenceSlotInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoReferenceSlotInfoKHR *>( this );
    }

    operator VkVideoReferenceSlotInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoReferenceSlotInfoKHR *>( this );
    }

    operator VkVideoReferenceSlotInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoReferenceSlotInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to every member, used below for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, int32_t const &, const VideoPictureResourceInfoKHR * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, slotIndex, pPictureResource );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoReferenceSlotInfoKHR const & ) const = default;
#else
    // Memberwise equality; pNext and pPictureResource are compared by pointer value, not by pointed-to contents.
    bool operator==( VideoReferenceSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( slotIndex == rhs.slotIndex ) && ( pPictureResource == rhs.pPictureResource );
#  endif
    }

    bool operator!=( VideoReferenceSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct's layout exactly; the reinterpret_cast conversions above depend on this ordering.
    StructureType                       sType            = StructureType::eVideoReferenceSlotInfoKHR;
    const void *                        pNext            = {};
    int32_t                             slotIndex        = {};
    const VideoPictureResourceInfoKHR * pPictureResource = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkVideoReferenceSlotInfoKHR to its C++ wrapper (C++20 builds only).
  template <>
  struct CppType<VkVideoReferenceSlotInfoKHR>
  {
    using Type = VideoReferenceSlotInfoKHR;
  };
#endif

  // Maps StructureType::eVideoReferenceSlotInfoKHR to its wrapper struct, enabling sType-based type lookup.
  template <>
  struct CppType<StructureType, StructureType::eVideoReferenceSlotInfoKHR>
  {
    using Type = VideoReferenceSlotInfoKHR;
  };

  // wrapper struct for struct VkVideoBeginCodingInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoBeginCodingInfoKHR.html
  struct VideoBeginCodingInfoKHR
  {
    // The C struct this wrapper is layout-compatible with.
    using NativeType = VkVideoBeginCodingInfoKHR;

    // NOTE(review): allowDuplicate presumably indicates whether this struct may occur more than once in a pNext chain — confirm against StructureChain usage.
    static const bool                                  allowDuplicate = false;
    // The sType value this struct must carry.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoBeginCodingInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all fields defaulted. Caller is responsible for keeping
    // referenceSlotCount consistent with the pReferenceSlots array.
    VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR( VideoBeginCodingFlagsKHR          flags_                  = {},
                                                  VideoSessionKHR                   videoSession_           = {},
                                                  VideoSessionParametersKHR         videoSessionParameters_ = {},
                                                  uint32_t                          referenceSlotCount_     = {},
                                                  const VideoReferenceSlotInfoKHR * pReferenceSlots_        = {},
                                                  const void *                      pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , videoSession{ videoSession_ }
      , videoSessionParameters{ videoSessionParameters_ }
      , referenceSlotCount{ referenceSlotCount_ }
      , pReferenceSlots{ pReferenceSlots_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR( VideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-compatible with VkVideoBeginCodingInfoKHR.
    VideoBeginCodingInfoKHR( VkVideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoBeginCodingInfoKHR( *reinterpret_cast<VideoBeginCodingInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode convenience constructor: derives referenceSlotCount and pReferenceSlots
    // from a single array proxy so count and pointer cannot drift apart. The proxy does not
    // own the data — the caller must keep the referenced slots alive while this struct is used.
    VideoBeginCodingInfoKHR( VideoBeginCodingFlagsKHR                                         flags_,
                             VideoSessionKHR                                                  videoSession_,
                             VideoSessionParametersKHR                                        videoSessionParameters_,
                             ArrayProxyNoTemporaries<const VideoReferenceSlotInfoKHR> const & referenceSlots_,
                             const void *                                                     pNext_ = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , videoSession( videoSession_ )
      , videoSessionParameters( videoSessionParameters_ )
      , referenceSlotCount( static_cast<uint32_t>( referenceSlots_.size() ) )
      , pReferenceSlots( referenceSlots_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VideoBeginCodingInfoKHR & operator=( VideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (available even when wrapper constructors are disabled).
    VideoBeginCodingInfoKHR & operator=( VkVideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoBeginCodingInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setFlags( VideoBeginCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setVideoSession( VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT
    {
      videoSession = videoSession_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setVideoSessionParameters( VideoSessionParametersKHR videoSessionParameters_ ) VULKAN_HPP_NOEXCEPT
    {
      videoSessionParameters = videoSessionParameters_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT
    {
      referenceSlotCount = referenceSlotCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setPReferenceSlots( const VideoReferenceSlotInfoKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT
    {
      pReferenceSlots = pReferenceSlots_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: updates count and pointer together from one array proxy.
    VideoBeginCodingInfoKHR & setReferenceSlots( ArrayProxyNoTemporaries<const VideoReferenceSlotInfoKHR> const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT
    {
      referenceSlotCount = static_cast<uint32_t>( referenceSlots_.size() );
      pReferenceSlots    = referenceSlots_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native Vk type (reference and pointer, const and non-const);
    // valid only because the wrapper shares the C struct's layout.
    operator VkVideoBeginCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( this );
    }

    operator VkVideoBeginCodingInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoBeginCodingInfoKHR *>( this );
    }

    operator VkVideoBeginCodingInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( this );
    }

    operator VkVideoBeginCodingInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoBeginCodingInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to every member, used below for reflection-based comparison.
    std::tuple<StructureType const &,
               const void * const &,
               VideoBeginCodingFlagsKHR const &,
               VideoSessionKHR const &,
               VideoSessionParametersKHR const &,
               uint32_t const &,
               const VideoReferenceSlotInfoKHR * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, videoSession, videoSessionParameters, referenceSlotCount, pReferenceSlots );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoBeginCodingInfoKHR const & ) const = default;
#else
    // Memberwise equality; pNext and pReferenceSlots are compared by pointer value, not by pointed-to contents.
    bool operator==( VideoBeginCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( videoSession == rhs.videoSession ) &&
             ( videoSessionParameters == rhs.videoSessionParameters ) && ( referenceSlotCount == rhs.referenceSlotCount ) &&
             ( pReferenceSlots == rhs.pReferenceSlots );
#  endif
    }

    bool operator!=( VideoBeginCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror the C struct's layout exactly; the reinterpret_cast conversions above depend on this ordering.
    StructureType                     sType                  = StructureType::eVideoBeginCodingInfoKHR;
    const void *                      pNext                  = {};
    VideoBeginCodingFlagsKHR          flags                  = {};
    VideoSessionKHR                   videoSession           = {};
    VideoSessionParametersKHR         videoSessionParameters = {};
    uint32_t                          referenceSlotCount     = {};
    const VideoReferenceSlotInfoKHR * pReferenceSlots        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkVideoBeginCodingInfoKHR to its C++ wrapper (C++20 builds only).
  template <>
  struct CppType<VkVideoBeginCodingInfoKHR>
  {
    using Type = VideoBeginCodingInfoKHR;
  };
#endif

  // Maps StructureType::eVideoBeginCodingInfoKHR to its wrapper struct, enabling sType-based type lookup.
  template <>
  struct CppType<StructureType, StructureType::eVideoBeginCodingInfoKHR>
  {
    using Type = VideoBeginCodingInfoKHR;
  };

  // wrapper struct for struct VkVideoCapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoCapabilitiesKHR.html
  struct VideoCapabilitiesKHR
  {
    // The C struct this wrapper mirrors member-for-member.
    using NativeType = VkVideoCapabilitiesKHR;

    // This sType may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoCapabilitiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every member defaults to a zero-initialized value, pNext to nullptr.
    VULKAN_HPP_CONSTEXPR_14 VideoCapabilitiesKHR( VideoCapabilityFlagsKHR flags_                             = {},
                                                  DeviceSize              minBitstreamBufferOffsetAlignment_ = {},
                                                  DeviceSize              minBitstreamBufferSizeAlignment_   = {},
                                                  Extent2D                pictureAccessGranularity_          = {},
                                                  Extent2D                minCodedExtent_                    = {},
                                                  Extent2D                maxCodedExtent_                    = {},
                                                  uint32_t                maxDpbSlots_                       = {},
                                                  uint32_t                maxActiveReferencePictures_        = {},
                                                  ExtensionProperties     stdHeaderVersion_                  = {},
                                                  void *                  pNext_                             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , minBitstreamBufferOffsetAlignment{ minBitstreamBufferOffsetAlignment_ }
      , minBitstreamBufferSizeAlignment{ minBitstreamBufferSizeAlignment_ }
      , pictureAccessGranularity{ pictureAccessGranularity_ }
      , minCodedExtent{ minCodedExtent_ }
      , maxCodedExtent{ maxCodedExtent_ }
      , maxDpbSlots{ maxDpbSlots_ }
      , maxActiveReferencePictures{ maxActiveReferencePictures_ }
      , stdHeaderVersion{ stdHeaderVersion_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 VideoCapabilitiesKHR( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; valid because the wrapper relies on layout compatibility with VkVideoCapabilitiesKHR.
    VideoCapabilitiesKHR( VkVideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoCapabilitiesKHR( *reinterpret_cast<VideoCapabilitiesKHR const *>( &rhs ) )
    {
    }

    VideoCapabilitiesKHR & operator=( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the wrapper's copy assignment.
    VideoCapabilitiesKHR & operator=( VkVideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoCapabilitiesKHR const *>( &rhs );
      return *this;
    }

    // Reference/pointer conversions to the C struct, reinterpreting this object's storage.
    operator VkVideoCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoCapabilitiesKHR *>( this );
    }

    operator VkVideoCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoCapabilitiesKHR *>( this );
    }

    operator VkVideoCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoCapabilitiesKHR *>( this );
    }

    operator VkVideoCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoCapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of references, used below for reflection-based comparison.
    std::tuple<StructureType const &,
               void * const &,
               VideoCapabilityFlagsKHR const &,
               DeviceSize const &,
               DeviceSize const &,
               Extent2D const &,
               Extent2D const &,
               Extent2D const &,
               uint32_t const &,
               uint32_t const &,
               ExtensionProperties const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       flags,
                       minBitstreamBufferOffsetAlignment,
                       minBitstreamBufferSizeAlignment,
                       pictureAccessGranularity,
                       minCodedExtent,
                       maxCodedExtent,
                       maxDpbSlots,
                       maxActiveReferencePictures,
                       stdHeaderVersion );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoCapabilitiesKHR const & ) const = default;
#else
    // Shallow memberwise equality: pNext is compared by pointer value, not by chain contents.
    bool operator==( VideoCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( minBitstreamBufferOffsetAlignment == rhs.minBitstreamBufferOffsetAlignment ) &&
             ( minBitstreamBufferSizeAlignment == rhs.minBitstreamBufferSizeAlignment ) && ( pictureAccessGranularity == rhs.pictureAccessGranularity ) &&
             ( minCodedExtent == rhs.minCodedExtent ) && ( maxCodedExtent == rhs.maxCodedExtent ) && ( maxDpbSlots == rhs.maxDpbSlots ) &&
             ( maxActiveReferencePictures == rhs.maxActiveReferencePictures ) && ( stdHeaderVersion == rhs.stdHeaderVersion );
#  endif
    }

    bool operator!=( VideoCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkVideoCapabilitiesKHR in declaration order; this is a returned (query) struct, so no setters are generated.
    StructureType           sType                             = StructureType::eVideoCapabilitiesKHR;
    void *                  pNext                             = {};
    VideoCapabilityFlagsKHR flags                             = {};
    DeviceSize              minBitstreamBufferOffsetAlignment = {};
    DeviceSize              minBitstreamBufferSizeAlignment   = {};
    Extent2D                pictureAccessGranularity          = {};
    Extent2D                minCodedExtent                    = {};
    Extent2D                maxCodedExtent                    = {};
    uint32_t                maxDpbSlots                       = {};
    uint32_t                maxActiveReferencePictures        = {};
    ExtensionProperties     stdHeaderVersion                  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API type VkVideoCapabilitiesKHR to its C++ wrapper, for template-based type lookup.
  template <>
  struct CppType<VkVideoCapabilitiesKHR>
  {
    using Type = VideoCapabilitiesKHR;
  };
#endif

  // Maps StructureType::eVideoCapabilitiesKHR to the wrapper struct that carries that sType.
  template <>
  struct CppType<StructureType, StructureType::eVideoCapabilitiesKHR>
  {
    using Type = VideoCapabilitiesKHR;
  };

  // wrapper struct for struct VkVideoCodingControlInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoCodingControlInfoKHR.html
  struct VideoCodingControlInfoKHR
  {
    // The C struct this wrapper mirrors member-for-member.
    using NativeType = VkVideoCodingControlInfoKHR;

    // This sType may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoCodingControlInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; members are initialized in declaration order (pNext first).
    VULKAN_HPP_CONSTEXPR VideoCodingControlInfoKHR( VideoCodingControlFlagsKHR flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoCodingControlInfoKHR( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by viewing it through the layout-compatible wrapper.
    VideoCodingControlInfoKHR( VkVideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoCodingControlInfoKHR( *reinterpret_cast<VideoCodingControlInfoKHR const *>( &rhs ) )
    {
    }

    VideoCodingControlInfoKHR & operator=( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the wrapper's defaulted copy assignment.
    VideoCodingControlInfoKHR & operator=( VkVideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoCodingControlInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoCodingControlInfoKHR & setFlags( VideoCodingControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoCodingControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Pointer conversions to the C struct.
    operator VkVideoCodingControlInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoCodingControlInfoKHR const *>( this );
    }

    operator VkVideoCodingControlInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoCodingControlInfoKHR *>( this );
    }

    // Reference conversions to the C struct.
    operator VkVideoCodingControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoCodingControlInfoKHR const *>( this );
    }

    operator VkVideoCodingControlInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoCodingControlInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of references, for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, VideoCodingControlFlagsKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoCodingControlInfoKHR const & ) const = default;
#else
    bool operator==( VideoCodingControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      // Shallow memberwise comparison; pNext is compared by address only.
      return ( flags == rhs.flags ) && ( pNext == rhs.pNext ) && ( sType == rhs.sType );
#  endif
    }

    bool operator!=( VideoCodingControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType              sType = StructureType::eVideoCodingControlInfoKHR;
    const void *               pNext = {};
    VideoCodingControlFlagsKHR flags = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API type VkVideoCodingControlInfoKHR to its C++ wrapper, for template-based type lookup.
  template <>
  struct CppType<VkVideoCodingControlInfoKHR>
  {
    using Type = VideoCodingControlInfoKHR;
  };
#endif

  // Maps StructureType::eVideoCodingControlInfoKHR to the wrapper struct that carries that sType.
  template <>
  struct CppType<StructureType, StructureType::eVideoCodingControlInfoKHR>
  {
    using Type = VideoCodingControlInfoKHR;
  };

  // wrapper struct for struct VkVideoDecodeAV1CapabilitiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeAV1CapabilitiesKHR.html
  struct VideoDecodeAV1CapabilitiesKHR
  {
    // The C struct this wrapper mirrors member-for-member.
    using NativeType = VkVideoDecodeAV1CapabilitiesKHR;

    // This sType may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeAv1CapabilitiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; maxLevel is a video-std (StdVideoAV1Level) value, zero-initialized by default.
    VULKAN_HPP_CONSTEXPR VideoDecodeAV1CapabilitiesKHR( StdVideoAV1Level maxLevel_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxLevel{ maxLevel_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoDecodeAV1CapabilitiesKHR( VideoDecodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility with VkVideoDecodeAV1CapabilitiesKHR.
    VideoDecodeAV1CapabilitiesKHR( VkVideoDecodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeAV1CapabilitiesKHR( *reinterpret_cast<VideoDecodeAV1CapabilitiesKHR const *>( &rhs ) )
    {
    }

    VideoDecodeAV1CapabilitiesKHR & operator=( VideoDecodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the wrapper's copy assignment.
    VideoDecodeAV1CapabilitiesKHR & operator=( VkVideoDecodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeAV1CapabilitiesKHR const *>( &rhs );
      return *this;
    }

    // Reference/pointer conversions to the C struct, reinterpreting this object's storage.
    operator VkVideoDecodeAV1CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeAV1CapabilitiesKHR *>( this );
    }

    operator VkVideoDecodeAV1CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeAV1CapabilitiesKHR *>( this );
    }

    operator VkVideoDecodeAV1CapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoDecodeAV1CapabilitiesKHR *>( this );
    }

    operator VkVideoDecodeAV1CapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeAV1CapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of references, for reflection-based comparison.
    std::tuple<StructureType const &, void * const &, StdVideoAV1Level const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxLevel );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Manual three-way comparison: unlike defaulted <=> in sibling structs, maxLevel is ordered bytewise via
    // memcmp (the video-std type is treated as an opaque blob), so <=> is written out member by member here.
    std::strong_ordering operator<=>( VideoDecodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#endif

    // Equality compares pNext by address and maxLevel bytewise; defined unconditionally (not generated from <=>).
    bool operator==( VideoDecodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ) == 0 );
    }

    bool operator!=( VideoDecodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    StructureType    sType    = StructureType::eVideoDecodeAv1CapabilitiesKHR;
    void *           pNext    = {};
    StdVideoAV1Level maxLevel = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API type VkVideoDecodeAV1CapabilitiesKHR to its C++ wrapper, for template-based type lookup.
  template <>
  struct CppType<VkVideoDecodeAV1CapabilitiesKHR>
  {
    using Type = VideoDecodeAV1CapabilitiesKHR;
  };
#endif

  // Maps StructureType::eVideoDecodeAv1CapabilitiesKHR to the wrapper struct that carries that sType.
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeAv1CapabilitiesKHR>
  {
    using Type = VideoDecodeAV1CapabilitiesKHR;
  };

  // wrapper struct for struct VkVideoDecodeAV1DpbSlotInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeAV1DpbSlotInfoKHR.html
  struct VideoDecodeAV1DpbSlotInfoKHR
  {
    // The C struct this wrapper mirrors member-for-member.
    using NativeType = VkVideoDecodeAV1DpbSlotInfoKHR;

    // This sType may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeAv1DpbSlotInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; members are initialized in declaration order (pNext first).
    VULKAN_HPP_CONSTEXPR VideoDecodeAV1DpbSlotInfoKHR( const StdVideoDecodeAV1ReferenceInfo * pStdReferenceInfo_ = {},
                                                       const void *                           pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pStdReferenceInfo{ pStdReferenceInfo_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoDecodeAV1DpbSlotInfoKHR( VideoDecodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct by viewing it through the layout-compatible wrapper.
    VideoDecodeAV1DpbSlotInfoKHR( VkVideoDecodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeAV1DpbSlotInfoKHR( *reinterpret_cast<VideoDecodeAV1DpbSlotInfoKHR const *>( &rhs ) )
    {
    }

    VideoDecodeAV1DpbSlotInfoKHR & operator=( VideoDecodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the wrapper's defaulted copy assignment.
    VideoDecodeAV1DpbSlotInfoKHR & operator=( VkVideoDecodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeAV1DpbSlotInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1DpbSlotInfoKHR & setPStdReferenceInfo( const StdVideoDecodeAV1ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdReferenceInfo = pStdReferenceInfo_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Pointer conversions to the C struct.
    operator VkVideoDecodeAV1DpbSlotInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeAV1DpbSlotInfoKHR const *>( this );
    }

    operator VkVideoDecodeAV1DpbSlotInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeAV1DpbSlotInfoKHR *>( this );
    }

    // Reference conversions to the C struct.
    operator VkVideoDecodeAV1DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeAV1DpbSlotInfoKHR const *>( this );
    }

    operator VkVideoDecodeAV1DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeAV1DpbSlotInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of references, for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, const StdVideoDecodeAV1ReferenceInfo * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pStdReferenceInfo );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeAV1DpbSlotInfoKHR const & ) const = default;
#else
    bool operator==( VideoDecodeAV1DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      // Shallow memberwise comparison; both pointers are compared by address only.
      return ( pStdReferenceInfo == rhs.pStdReferenceInfo ) && ( pNext == rhs.pNext ) && ( sType == rhs.sType );
#  endif
    }

    bool operator!=( VideoDecodeAV1DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType                          sType             = StructureType::eVideoDecodeAv1DpbSlotInfoKHR;
    const void *                           pNext             = {};
    const StdVideoDecodeAV1ReferenceInfo * pStdReferenceInfo = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API type VkVideoDecodeAV1DpbSlotInfoKHR to its C++ wrapper, for template-based type lookup.
  template <>
  struct CppType<VkVideoDecodeAV1DpbSlotInfoKHR>
  {
    using Type = VideoDecodeAV1DpbSlotInfoKHR;
  };
#endif

  // Maps StructureType::eVideoDecodeAv1DpbSlotInfoKHR to the wrapper struct that carries that sType.
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeAv1DpbSlotInfoKHR>
  {
    using Type = VideoDecodeAV1DpbSlotInfoKHR;
  };

  // wrapper struct for struct VkVideoDecodeAV1InlineSessionParametersInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeAV1InlineSessionParametersInfoKHR.html
  struct VideoDecodeAV1InlineSessionParametersInfoKHR
  {
    // The C struct this wrapper mirrors member-for-member.
    using NativeType = VkVideoDecodeAV1InlineSessionParametersInfoKHR;

    // This sType may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeAv1InlineSessionParametersInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; the sequence-header pointer is only stored, not copied or owned.
    VULKAN_HPP_CONSTEXPR VideoDecodeAV1InlineSessionParametersInfoKHR( const StdVideoAV1SequenceHeader * pStdSequenceHeader_ = {},
                                                                       const void *                      pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pStdSequenceHeader{ pStdSequenceHeader_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoDecodeAV1InlineSessionParametersInfoKHR( VideoDecodeAV1InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility with the Vk type.
    VideoDecodeAV1InlineSessionParametersInfoKHR( VkVideoDecodeAV1InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeAV1InlineSessionParametersInfoKHR( *reinterpret_cast<VideoDecodeAV1InlineSessionParametersInfoKHR const *>( &rhs ) )
    {
    }

    VideoDecodeAV1InlineSessionParametersInfoKHR & operator=( VideoDecodeAV1InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the wrapper's copy assignment.
    VideoDecodeAV1InlineSessionParametersInfoKHR & operator=( VkVideoDecodeAV1InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeAV1InlineSessionParametersInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1InlineSessionParametersInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1InlineSessionParametersInfoKHR &
      setPStdSequenceHeader( const StdVideoAV1SequenceHeader * pStdSequenceHeader_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdSequenceHeader = pStdSequenceHeader_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Reference/pointer conversions to the C struct, reinterpreting this object's storage.
    operator VkVideoDecodeAV1InlineSessionParametersInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeAV1InlineSessionParametersInfoKHR *>( this );
    }

    operator VkVideoDecodeAV1InlineSessionParametersInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeAV1InlineSessionParametersInfoKHR *>( this );
    }

    operator VkVideoDecodeAV1InlineSessionParametersInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoDecodeAV1InlineSessionParametersInfoKHR *>( this );
    }

    operator VkVideoDecodeAV1InlineSessionParametersInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeAV1InlineSessionParametersInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of references, for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, const StdVideoAV1SequenceHeader * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pStdSequenceHeader );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeAV1InlineSessionParametersInfoKHR const & ) const = default;
#else
    // Shallow memberwise equality: both pointers are compared by address, not by pointee contents.
    bool operator==( VideoDecodeAV1InlineSessionParametersInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdSequenceHeader == rhs.pStdSequenceHeader );
#  endif
    }

    bool operator!=( VideoDecodeAV1InlineSessionParametersInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                     sType              = StructureType::eVideoDecodeAv1InlineSessionParametersInfoKHR;
    const void *                      pNext              = {};
    const StdVideoAV1SequenceHeader * pStdSequenceHeader = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API type VkVideoDecodeAV1InlineSessionParametersInfoKHR to its C++ wrapper, for template-based type lookup.
  template <>
  struct CppType<VkVideoDecodeAV1InlineSessionParametersInfoKHR>
  {
    using Type = VideoDecodeAV1InlineSessionParametersInfoKHR;
  };
#endif

  // Maps StructureType::eVideoDecodeAv1InlineSessionParametersInfoKHR to the wrapper struct that carries that sType.
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeAv1InlineSessionParametersInfoKHR>
  {
    using Type = VideoDecodeAV1InlineSessionParametersInfoKHR;
  };

  // wrapper struct for struct VkVideoDecodeAV1PictureInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeAV1PictureInfoKHR.html
  struct VideoDecodeAV1PictureInfoKHR
  {
    // The C struct this wrapper mirrors member-for-member.
    using NativeType = VkVideoDecodeAV1PictureInfoKHR;

    // This sType may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeAv1PictureInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; pTileOffsets and pTileSizes are both tileCount elements long (see the enhanced-mode
    // constructor below, which derives tileCount from the array sizes).
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR( const StdVideoDecodeAV1PictureInfo *                                   pStdPictureInfo_          = {},
                                                          std::array<int32_t, VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR> const & referenceNameSlotIndices_ = {},
                                                          uint32_t                                                               frameHeaderOffset_        = {},
                                                          uint32_t                                                               tileCount_                = {},
                                                          const uint32_t *                                                       pTileOffsets_             = {},
                                                          const uint32_t *                                                       pTileSizes_               = {},
                                                          const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pStdPictureInfo{ pStdPictureInfo_ }
      , referenceNameSlotIndices{ referenceNameSlotIndices_ }
      , frameHeaderOffset{ frameHeaderOffset_ }
      , tileCount{ tileCount_ }
      , pTileOffsets{ pTileOffsets_ }
      , pTileSizes{ pTileSizes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR( VideoDecodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility with VkVideoDecodeAV1PictureInfoKHR.
    VideoDecodeAV1PictureInfoKHR( VkVideoDecodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeAV1PictureInfoKHR( *reinterpret_cast<VideoDecodeAV1PictureInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: tileCount is taken from tileOffsets_.size(); the two proxies must have equal
    // size (asserted with VULKAN_HPP_NO_EXCEPTIONS, otherwise a LogicError is thrown). Note that tileSizes_
    // defaults to empty, so a non-empty tileOffsets_ requires an explicitly matching tileSizes_.
    VideoDecodeAV1PictureInfoKHR( const StdVideoDecodeAV1PictureInfo *                                   pStdPictureInfo_,
                                  std::array<int32_t, VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR> const & referenceNameSlotIndices_,
                                  uint32_t                                                               frameHeaderOffset_,
                                  ArrayProxyNoTemporaries<const uint32_t> const &                        tileOffsets_,
                                  ArrayProxyNoTemporaries<const uint32_t> const &                        tileSizes_ = {},
                                  const void *                                                           pNext_     = nullptr )
      : pNext( pNext_ )
      , pStdPictureInfo( pStdPictureInfo_ )
      , referenceNameSlotIndices( referenceNameSlotIndices_ )
      , frameHeaderOffset( frameHeaderOffset_ )
      , tileCount( static_cast<uint32_t>( tileOffsets_.size() ) )
      , pTileOffsets( tileOffsets_.data() )
      , pTileSizes( tileSizes_.data() )
    {
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( tileOffsets_.size() == tileSizes_.size() );
#    else
      if ( tileOffsets_.size() != tileSizes_.size() )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                          "::VideoDecodeAV1PictureInfoKHR::VideoDecodeAV1PictureInfoKHR: tileOffsets_.size() != tileSizes_.size()" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VideoDecodeAV1PictureInfoKHR & operator=( VideoDecodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the wrapper's copy assignment.
    VideoDecodeAV1PictureInfoKHR & operator=( VkVideoDecodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeAV1PictureInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters; each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & setPStdPictureInfo( const StdVideoDecodeAV1PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdPictureInfo = pStdPictureInfo_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR &
      setReferenceNameSlotIndices( std::array<int32_t, VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR> referenceNameSlotIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      referenceNameSlotIndices = referenceNameSlotIndices_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & setFrameHeaderOffset( uint32_t frameHeaderOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      frameHeaderOffset = frameHeaderOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & setTileCount( uint32_t tileCount_ ) VULKAN_HPP_NOEXCEPT
    {
      tileCount = tileCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & setPTileOffsets( const uint32_t * pTileOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      pTileOffsets = pTileOffsets_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Proxy setter: also overwrites tileCount with tileOffsets_.size().
    VideoDecodeAV1PictureInfoKHR & setTileOffsets( ArrayProxyNoTemporaries<const uint32_t> const & tileOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      tileCount    = static_cast<uint32_t>( tileOffsets_.size() );
      pTileOffsets = tileOffsets_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & setPTileSizes( const uint32_t * pTileSizes_ ) VULKAN_HPP_NOEXCEPT
    {
      pTileSizes = pTileSizes_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Proxy setter: also overwrites tileCount with tileSizes_.size() (the last proxy setter called wins).
    VideoDecodeAV1PictureInfoKHR & setTileSizes( ArrayProxyNoTemporaries<const uint32_t> const & tileSizes_ ) VULKAN_HPP_NOEXCEPT
    {
      tileCount  = static_cast<uint32_t>( tileSizes_.size() );
      pTileSizes = tileSizes_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Reference/pointer conversions to the C struct, reinterpreting this object's storage.
    operator VkVideoDecodeAV1PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeAV1PictureInfoKHR *>( this );
    }

    operator VkVideoDecodeAV1PictureInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeAV1PictureInfoKHR *>( this );
    }

    operator VkVideoDecodeAV1PictureInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoDecodeAV1PictureInfoKHR *>( this );
    }

    operator VkVideoDecodeAV1PictureInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeAV1PictureInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // All members as a tuple of references, for reflection-based comparison.
    std::tuple<StructureType const &,
               const void * const &,
               const StdVideoDecodeAV1PictureInfo * const &,
               ArrayWrapper1D<int32_t, VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR> const &,
               uint32_t const &,
               uint32_t const &,
               const uint32_t * const &,
               const uint32_t * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pStdPictureInfo, referenceNameSlotIndices, frameHeaderOffset, tileCount, pTileOffsets, pTileSizes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeAV1PictureInfoKHR const & ) const = default;
#else
    // Shallow memberwise equality: pointer members are compared by address, not by pointed-to tile data.
    bool operator==( VideoDecodeAV1PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) &&
             ( referenceNameSlotIndices == rhs.referenceNameSlotIndices ) && ( frameHeaderOffset == rhs.frameHeaderOffset ) && ( tileCount == rhs.tileCount ) &&
             ( pTileOffsets == rhs.pTileOffsets ) && ( pTileSizes == rhs.pTileSizes );
#  endif
    }

    bool operator!=( VideoDecodeAV1PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                                                      sType                    = StructureType::eVideoDecodeAv1PictureInfoKHR;
    const void *                                                       pNext                    = {};
    const StdVideoDecodeAV1PictureInfo *                               pStdPictureInfo          = {};
    ArrayWrapper1D<int32_t, VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR> referenceNameSlotIndices = {};
    uint32_t                                                           frameHeaderOffset        = {};
    uint32_t                                                           tileCount                = {};
    const uint32_t *                                                   pTileOffsets             = {};
    const uint32_t *                                                   pTileSizes               = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Metafunction mapping the C struct type to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkVideoDecodeAV1PictureInfoKHR>
  {
    using Type = VideoDecodeAV1PictureInfoKHR;
  };
#endif

  // Metafunction mapping the StructureType enumerant to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeAv1PictureInfoKHR>
  {
    using Type = VideoDecodeAV1PictureInfoKHR;
  };

  // wrapper struct for struct VkVideoDecodeAV1ProfileInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeAV1ProfileInfoKHR.html
  struct VideoDecodeAV1ProfileInfoKHR
  {
    using NativeType = VkVideoDecodeAV1ProfileInfoKHR;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeAv1ProfileInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer and is never a parameter.
    VULKAN_HPP_CONSTEXPR
      VideoDecodeAV1ProfileInfoKHR( StdVideoAV1Profile stdProfile_ = {}, Bool32 filmGrainSupport_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stdProfile{ stdProfile_ }
      , filmGrainSupport{ filmGrainSupport_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoDecodeAV1ProfileInfoKHR( VideoDecodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on this wrapper being layout-compatible with VkVideoDecodeAV1ProfileInfoKHR.
    VideoDecodeAV1ProfileInfoKHR( VkVideoDecodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeAV1ProfileInfoKHR( *reinterpret_cast<VideoDecodeAV1ProfileInfoKHR const *>( &rhs ) )
    {
    }

    VideoDecodeAV1ProfileInfoKHR & operator=( VideoDecodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility assumption as the converting constructor).
    VideoDecodeAV1ProfileInfoKHR & operator=( VkVideoDecodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeAV1ProfileInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1ProfileInfoKHR & setStdProfile( StdVideoAV1Profile stdProfile_ ) VULKAN_HPP_NOEXCEPT
    {
      stdProfile = stdProfile_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1ProfileInfoKHR & setFilmGrainSupport( Bool32 filmGrainSupport_ ) VULKAN_HPP_NOEXCEPT
    {
      filmGrainSupport = filmGrainSupport_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (reference and pointer, const and non-const),
    // so this wrapper can be passed directly to the C API.
    operator VkVideoDecodeAV1ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeAV1ProfileInfoKHR *>( this );
    }

    operator VkVideoDecodeAV1ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeAV1ProfileInfoKHR *>( this );
    }

    operator VkVideoDecodeAV1ProfileInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoDecodeAV1ProfileInfoKHR *>( this );
    }

    operator VkVideoDecodeAV1ProfileInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeAV1ProfileInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, StdVideoAV1Profile const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stdProfile, filmGrainSupport );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written (not defaulted) three-way comparison: stdProfile is a vk_video C struct
    // without comparison operators, so it is compared bytewise via memcmp and the result
    // mapped onto std::strong_ordering.
    std::strong_ordering operator<=>( VideoDecodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = memcmp( &stdProfile, &rhs.stdProfile, sizeof( StdVideoAV1Profile ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = filmGrainSupport <=> rhs.filmGrainSupport; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    // == / != are defined unconditionally (not derived from <=>) because of the memcmp-compared member.
    // Note: pNext is compared by pointer value only, not by chain contents.
    bool operator==( VideoDecodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfile, &rhs.stdProfile, sizeof( StdVideoAV1Profile ) ) == 0 ) &&
             ( filmGrainSupport == rhs.filmGrainSupport );
    }

    bool operator!=( VideoDecodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    StructureType      sType            = StructureType::eVideoDecodeAv1ProfileInfoKHR;
    const void *       pNext            = {};
    StdVideoAV1Profile stdProfile       = {};
    Bool32             filmGrainSupport = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Metafunction mapping the C struct type to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkVideoDecodeAV1ProfileInfoKHR>
  {
    using Type = VideoDecodeAV1ProfileInfoKHR;
  };
#endif

  // Metafunction mapping the StructureType enumerant to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeAv1ProfileInfoKHR>
  {
    using Type = VideoDecodeAV1ProfileInfoKHR;
  };

  // wrapper struct for struct VkVideoDecodeAV1SessionParametersCreateInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeAV1SessionParametersCreateInfoKHR.html
  struct VideoDecodeAV1SessionParametersCreateInfoKHR
  {
    using NativeType = VkVideoDecodeAV1SessionParametersCreateInfoKHR;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeAv1SessionParametersCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; only stores the sequence-header pointer — the pointee is not copied,
    // so the StdVideoAV1SequenceHeader must outlive any use of this struct.
    VULKAN_HPP_CONSTEXPR VideoDecodeAV1SessionParametersCreateInfoKHR( const StdVideoAV1SequenceHeader * pStdSequenceHeader_ = {},
                                                                       const void *                      pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pStdSequenceHeader{ pStdSequenceHeader_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoDecodeAV1SessionParametersCreateInfoKHR( VideoDecodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility with the wrapper.
    VideoDecodeAV1SessionParametersCreateInfoKHR( VkVideoDecodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeAV1SessionParametersCreateInfoKHR( *reinterpret_cast<VideoDecodeAV1SessionParametersCreateInfoKHR const *>( &rhs ) )
    {
    }

    VideoDecodeAV1SessionParametersCreateInfoKHR & operator=( VideoDecodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility assumption as the converting constructor).
    VideoDecodeAV1SessionParametersCreateInfoKHR & operator=( VkVideoDecodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeAV1SessionParametersCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1SessionParametersCreateInfoKHR &
      setPStdSequenceHeader( const StdVideoAV1SequenceHeader * pStdSequenceHeader_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdSequenceHeader = pStdSequenceHeader_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, so this wrapper can be passed directly to the C API.
    operator VkVideoDecodeAV1SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeAV1SessionParametersCreateInfoKHR *>( this );
    }

    operator VkVideoDecodeAV1SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeAV1SessionParametersCreateInfoKHR *>( this );
    }

    operator VkVideoDecodeAV1SessionParametersCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoDecodeAV1SessionParametersCreateInfoKHR *>( this );
    }

    operator VkVideoDecodeAV1SessionParametersCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeAV1SessionParametersCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, const StdVideoAV1SequenceHeader * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pStdSequenceHeader );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // All members are comparable, so the three-way comparison can be defaulted.
    auto operator<=>( VideoDecodeAV1SessionParametersCreateInfoKHR const & ) const = default;
#else
    // Fallback comparison when <=> is unavailable; pointer members compare by address, not pointee.
    bool operator==( VideoDecodeAV1SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdSequenceHeader == rhs.pStdSequenceHeader );
#  endif
    }

    bool operator!=( VideoDecodeAV1SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                     sType              = StructureType::eVideoDecodeAv1SessionParametersCreateInfoKHR;
    const void *                      pNext              = {};
    const StdVideoAV1SequenceHeader * pStdSequenceHeader = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Metafunction mapping the C struct type to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkVideoDecodeAV1SessionParametersCreateInfoKHR>
  {
    using Type = VideoDecodeAV1SessionParametersCreateInfoKHR;
  };
#endif

  // Metafunction mapping the StructureType enumerant to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeAv1SessionParametersCreateInfoKHR>
  {
    using Type = VideoDecodeAV1SessionParametersCreateInfoKHR;
  };

  // wrapper struct for struct VkVideoDecodeCapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeCapabilitiesKHR.html
  struct VideoDecodeCapabilitiesKHR
  {
    using NativeType = VkVideoDecodeCapabilitiesKHR;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeCapabilitiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; note pNext is a non-const void * here, as for structs the
    // implementation writes into.
    VULKAN_HPP_CONSTEXPR VideoDecodeCapabilitiesKHR( VideoDecodeCapabilityFlagsKHR flags_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoDecodeCapabilitiesKHR( VideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility with the wrapper.
    VideoDecodeCapabilitiesKHR( VkVideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeCapabilitiesKHR( *reinterpret_cast<VideoDecodeCapabilitiesKHR const *>( &rhs ) )
    {
    }

    VideoDecodeCapabilitiesKHR & operator=( VideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility assumption as the converting constructor).
    // No member setters are generated for this struct.
    VideoDecodeCapabilitiesKHR & operator=( VkVideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeCapabilitiesKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C struct, so this wrapper can be passed directly to the C API.
    operator VkVideoDecodeCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeCapabilitiesKHR *>( this );
    }

    operator VkVideoDecodeCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeCapabilitiesKHR *>( this );
    }

    operator VkVideoDecodeCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoDecodeCapabilitiesKHR *>( this );
    }

    operator VkVideoDecodeCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeCapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, in declaration order.
    std::tuple<StructureType const &, void * const &, VideoDecodeCapabilityFlagsKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // All members are comparable, so the three-way comparison can be defaulted.
    auto operator<=>( VideoDecodeCapabilitiesKHR const & ) const = default;
#else
    // Fallback comparison when <=> is unavailable; pNext compares by address, not chain contents.
    bool operator==( VideoDecodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
#  endif
    }

    bool operator!=( VideoDecodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                 sType = StructureType::eVideoDecodeCapabilitiesKHR;
    void *                        pNext = {};
    VideoDecodeCapabilityFlagsKHR flags = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Metafunction mapping the C struct type to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkVideoDecodeCapabilitiesKHR>
  {
    using Type = VideoDecodeCapabilitiesKHR;
  };
#endif

  // Metafunction mapping the StructureType enumerant to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeCapabilitiesKHR>
  {
    using Type = VideoDecodeCapabilitiesKHR;
  };

  // wrapper struct for struct VkVideoDecodeH264CapabilitiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH264CapabilitiesKHR.html
  struct VideoDecodeH264CapabilitiesKHR
  {
    using NativeType = VkVideoDecodeH264CapabilitiesKHR;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeH264CapabilitiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; note pNext is a non-const void * here, as for structs the
    // implementation writes into.
    VULKAN_HPP_CONSTEXPR VideoDecodeH264CapabilitiesKHR( StdVideoH264LevelIdc maxLevelIdc_            = {},
                                                         Offset2D             fieldOffsetGranularity_ = {},
                                                         void *               pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxLevelIdc{ maxLevelIdc_ }
      , fieldOffsetGranularity{ fieldOffsetGranularity_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoDecodeH264CapabilitiesKHR( VideoDecodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility with the wrapper.
    VideoDecodeH264CapabilitiesKHR( VkVideoDecodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH264CapabilitiesKHR( *reinterpret_cast<VideoDecodeH264CapabilitiesKHR const *>( &rhs ) )
    {
    }

    VideoDecodeH264CapabilitiesKHR & operator=( VideoDecodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility assumption as the converting constructor).
    // No member setters are generated for this struct.
    VideoDecodeH264CapabilitiesKHR & operator=( VkVideoDecodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeH264CapabilitiesKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C struct, so this wrapper can be passed directly to the C API.
    operator VkVideoDecodeH264CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH264CapabilitiesKHR *>( this );
    }

    operator VkVideoDecodeH264CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH264CapabilitiesKHR *>( this );
    }

    operator VkVideoDecodeH264CapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoDecodeH264CapabilitiesKHR *>( this );
    }

    operator VkVideoDecodeH264CapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeH264CapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, in declaration order.
    std::tuple<StructureType const &, void * const &, StdVideoH264LevelIdc const &, Offset2D const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxLevelIdc, fieldOffsetGranularity );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written (not defaulted) three-way comparison: maxLevelIdc is a vk_video C type
    // without comparison operators, so it is compared bytewise via memcmp and the result
    // mapped onto std::strong_ordering.
    std::strong_ordering operator<=>( VideoDecodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = fieldOffsetGranularity <=> rhs.fieldOffsetGranularity; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    // == / != are defined outside the spaceship guard (i.e. unconditionally) because
    // operator<=> is not defaulted for this struct; memcmp handles the opaque member.
    bool operator==( VideoDecodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ) == 0 ) &&
             ( fieldOffsetGranularity == rhs.fieldOffsetGranularity );
    }

    bool operator!=( VideoDecodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    StructureType        sType                  = StructureType::eVideoDecodeH264CapabilitiesKHR;
    void *               pNext                  = {};
    StdVideoH264LevelIdc maxLevelIdc            = {};
    Offset2D             fieldOffsetGranularity = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Metafunction mapping the C struct type to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkVideoDecodeH264CapabilitiesKHR>
  {
    using Type = VideoDecodeH264CapabilitiesKHR;
  };
#endif

  // Metafunction mapping the StructureType enumerant to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH264CapabilitiesKHR>
  {
    using Type = VideoDecodeH264CapabilitiesKHR;
  };

  // wrapper struct for struct VkVideoDecodeH264DpbSlotInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH264DpbSlotInfoKHR.html
  struct VideoDecodeH264DpbSlotInfoKHR
  {
    using NativeType = VkVideoDecodeH264DpbSlotInfoKHR;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeH264DpbSlotInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; stores the reference-info pointer only — the pointee is not copied,
    // so it must outlive any use of this struct.
    VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoKHR( const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ = {},
                                                        const void *                            pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pStdReferenceInfo{ pStdReferenceInfo_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoKHR( VideoDecodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility with the wrapper.
    VideoDecodeH264DpbSlotInfoKHR( VkVideoDecodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH264DpbSlotInfoKHR( *reinterpret_cast<VideoDecodeH264DpbSlotInfoKHR const *>( &rhs ) )
    {
    }

    VideoDecodeH264DpbSlotInfoKHR & operator=( VideoDecodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility assumption as the converting constructor).
    VideoDecodeH264DpbSlotInfoKHR & operator=( VkVideoDecodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeH264DpbSlotInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoKHR &
      setPStdReferenceInfo( const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdReferenceInfo = pStdReferenceInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, so this wrapper can be passed directly to the C API.
    operator VkVideoDecodeH264DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH264DpbSlotInfoKHR *>( this );
    }

    operator VkVideoDecodeH264DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH264DpbSlotInfoKHR *>( this );
    }

    operator VkVideoDecodeH264DpbSlotInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoDecodeH264DpbSlotInfoKHR *>( this );
    }

    operator VkVideoDecodeH264DpbSlotInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeH264DpbSlotInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, const StdVideoDecodeH264ReferenceInfo * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pStdReferenceInfo );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // All members are comparable, so the three-way comparison can be defaulted.
    auto operator<=>( VideoDecodeH264DpbSlotInfoKHR const & ) const = default;
#else
    // Fallback comparison when <=> is unavailable; pointer members compare by address, not pointee.
    bool operator==( VideoDecodeH264DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo );
#  endif
    }

    bool operator!=( VideoDecodeH264DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                           sType             = StructureType::eVideoDecodeH264DpbSlotInfoKHR;
    const void *                            pNext             = {};
    const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Metafunction mapping the C struct type to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkVideoDecodeH264DpbSlotInfoKHR>
  {
    using Type = VideoDecodeH264DpbSlotInfoKHR;
  };
#endif

  // Metafunction mapping the StructureType enumerant to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH264DpbSlotInfoKHR>
  {
    using Type = VideoDecodeH264DpbSlotInfoKHR;
  };

  // wrapper struct for struct VkVideoDecodeH264InlineSessionParametersInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH264InlineSessionParametersInfoKHR.html
  struct VideoDecodeH264InlineSessionParametersInfoKHR
  {
    using NativeType = VkVideoDecodeH264InlineSessionParametersInfoKHR;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeH264InlineSessionParametersInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; SPS/PPS pointers are stored as-is — the pointees are not copied,
    // so they must outlive any use of this struct.
    VULKAN_HPP_CONSTEXPR VideoDecodeH264InlineSessionParametersInfoKHR( const StdVideoH264SequenceParameterSet * pStdSPS_ = {},
                                                                        const StdVideoH264PictureParameterSet *  pStdPPS_ = {},
                                                                        const void *                             pNext_   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pStdSPS{ pStdSPS_ }
      , pStdPPS{ pStdPPS_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      VideoDecodeH264InlineSessionParametersInfoKHR( VideoDecodeH264InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility with the wrapper.
    VideoDecodeH264InlineSessionParametersInfoKHR( VkVideoDecodeH264InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH264InlineSessionParametersInfoKHR( *reinterpret_cast<VideoDecodeH264InlineSessionParametersInfoKHR const *>( &rhs ) )
    {
    }

    VideoDecodeH264InlineSessionParametersInfoKHR & operator=( VideoDecodeH264InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility assumption as the converting constructor).
    VideoDecodeH264InlineSessionParametersInfoKHR & operator=( VkVideoDecodeH264InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeH264InlineSessionParametersInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264InlineSessionParametersInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264InlineSessionParametersInfoKHR & setPStdSPS( const StdVideoH264SequenceParameterSet * pStdSPS_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdSPS = pStdSPS_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264InlineSessionParametersInfoKHR & setPStdPPS( const StdVideoH264PictureParameterSet * pStdPPS_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdPPS = pStdPPS_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, so this wrapper can be passed directly to the C API.
    operator VkVideoDecodeH264InlineSessionParametersInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH264InlineSessionParametersInfoKHR *>( this );
    }

    operator VkVideoDecodeH264InlineSessionParametersInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH264InlineSessionParametersInfoKHR *>( this );
    }

    operator VkVideoDecodeH264InlineSessionParametersInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoDecodeH264InlineSessionParametersInfoKHR *>( this );
    }

    operator VkVideoDecodeH264InlineSessionParametersInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeH264InlineSessionParametersInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, const StdVideoH264SequenceParameterSet * const &, const StdVideoH264PictureParameterSet * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pStdSPS, pStdPPS );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // All members are comparable, so the three-way comparison can be defaulted.
    auto operator<=>( VideoDecodeH264InlineSessionParametersInfoKHR const & ) const = default;
#else
    // Fallback comparison when <=> is unavailable; pointer members compare by address, not pointee.
    bool operator==( VideoDecodeH264InlineSessionParametersInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdSPS == rhs.pStdSPS ) && ( pStdPPS == rhs.pStdPPS );
#  endif
    }

    bool operator!=( VideoDecodeH264InlineSessionParametersInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                            sType   = StructureType::eVideoDecodeH264InlineSessionParametersInfoKHR;
    const void *                             pNext   = {};
    const StdVideoH264SequenceParameterSet * pStdSPS = {};
    const StdVideoH264PictureParameterSet *  pStdPPS = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Metafunction mapping the C struct type to its C++ wrapper (available from C++20 on).
  template <>
  struct CppType<VkVideoDecodeH264InlineSessionParametersInfoKHR>
  {
    using Type = VideoDecodeH264InlineSessionParametersInfoKHR;
  };
#endif

  // Metafunction mapping the StructureType enumerant to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH264InlineSessionParametersInfoKHR>
  {
    using Type = VideoDecodeH264InlineSessionParametersInfoKHR;
  };

  // wrapper struct for struct VkVideoDecodeH264PictureInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH264PictureInfoKHR.html
  struct VideoDecodeH264PictureInfoKHR
  {
    using NativeType = VkVideoDecodeH264PictureInfoKHR;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeH264PictureInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; pointers are stored as-is — the pointed-to picture info and the
    // slice-offset array are not copied and must outlive any use of this struct.
    VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureInfoKHR( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ = {},
                                                        uint32_t                              sliceCount_      = {},
                                                        const uint32_t *                      pSliceOffsets_   = {},
                                                        const void *                          pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pStdPictureInfo{ pStdPictureInfo_ }
      , sliceCount{ sliceCount_ }
      , pSliceOffsets{ pSliceOffsets_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureInfoKHR( VideoDecodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility with the wrapper.
    VideoDecodeH264PictureInfoKHR( VkVideoDecodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH264PictureInfoKHR( *reinterpret_cast<VideoDecodeH264PictureInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode convenience constructor: derives sliceCount and pSliceOffsets from the
    // proxy. The proxy is a non-owning view (only its .data() pointer is stored), so the
    // backing storage must stay alive while this struct is in use.
    VideoDecodeH264PictureInfoKHR( const StdVideoDecodeH264PictureInfo *           pStdPictureInfo_,
                                   ArrayProxyNoTemporaries<const uint32_t> const & sliceOffsets_,
                                   const void *                                    pNext_ = nullptr )
      : pNext( pNext_ ), pStdPictureInfo( pStdPictureInfo_ ), sliceCount( static_cast<uint32_t>( sliceOffsets_.size() ) ), pSliceOffsets( sliceOffsets_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VideoDecodeH264PictureInfoKHR & operator=( VideoDecodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility assumption as the converting constructor).
    VideoDecodeH264PictureInfoKHR & operator=( VkVideoDecodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeH264PictureInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoKHR & setPStdPictureInfo( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdPictureInfo = pStdPictureInfo_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoKHR & setSliceCount( uint32_t sliceCount_ ) VULKAN_HPP_NOEXCEPT
    {
      sliceCount = sliceCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoKHR & setPSliceOffsets( const uint32_t * pSliceOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      pSliceOffsets = pSliceOffsets_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets sliceCount and pSliceOffsets together from a non-owning proxy (see the
    // enhanced-mode constructor for the lifetime requirement).
    VideoDecodeH264PictureInfoKHR & setSliceOffsets( ArrayProxyNoTemporaries<const uint32_t> const & sliceOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      sliceCount    = static_cast<uint32_t>( sliceOffsets_.size() );
      pSliceOffsets = sliceOffsets_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, so this wrapper can be passed directly to the C API.
    operator VkVideoDecodeH264PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH264PictureInfoKHR *>( this );
    }

    operator VkVideoDecodeH264PictureInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH264PictureInfoKHR *>( this );
    }

    operator VkVideoDecodeH264PictureInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoDecodeH264PictureInfoKHR *>( this );
    }

    operator VkVideoDecodeH264PictureInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeH264PictureInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, const StdVideoDecodeH264PictureInfo * const &, uint32_t const &, const uint32_t * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pStdPictureInfo, sliceCount, pSliceOffsets );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // All members are comparable, so the three-way comparison can be defaulted.
    auto operator<=>( VideoDecodeH264PictureInfoKHR const & ) const = default;
#else
    // Fallback comparison when <=> is unavailable; pointer members compare by address, not pointee.
    bool operator==( VideoDecodeH264PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && ( sliceCount == rhs.sliceCount ) &&
             ( pSliceOffsets == rhs.pSliceOffsets );
#  endif
    }

    bool operator!=( VideoDecodeH264PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                         sType           = StructureType::eVideoDecodeH264PictureInfoKHR;
    const void *                          pNext           = {};
    const StdVideoDecodeH264PictureInfo * pStdPictureInfo = {};
    uint32_t                              sliceCount      = {};
    const uint32_t *                      pSliceOffsets   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkVideoDecodeH264PictureInfoKHR to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkVideoDecodeH264PictureInfoKHR>
  {
    using Type = VideoDecodeH264PictureInfoKHR;
  };
#endif

  // Maps StructureType::eVideoDecodeH264PictureInfoKHR back to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH264PictureInfoKHR>
  {
    using Type = VideoDecodeH264PictureInfoKHR;
  };

  // wrapper struct for struct VkVideoDecodeH264ProfileInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH264ProfileInfoKHR.html
  struct VideoDecodeH264ProfileInfoKHR
  {
    using NativeType = VkVideoDecodeH264ProfileInfoKHR;

    // Compile-time metadata: the matching StructureType value, and whether this struct
    // may appear more than once in a structure chain (it may not).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeH264ProfileInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer below, only the payload
    // members and the pNext chain pointer are settable.
    VULKAN_HPP_CONSTEXPR
      VideoDecodeH264ProfileInfoKHR( StdVideoH264ProfileIdc                  stdProfileIdc_ = {},
                                     VideoDecodeH264PictureLayoutFlagBitsKHR pictureLayout_ = VideoDecodeH264PictureLayoutFlagBitsKHR::eProgressive,
                                     const void *                            pNext_         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stdProfileIdc{ stdProfileIdc_ }
      , pictureLayout{ pictureLayout_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoDecodeH264ProfileInfoKHR( VideoDecodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper having the exact same layout.
    VideoDecodeH264ProfileInfoKHR( VkVideoDecodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH264ProfileInfoKHR( *reinterpret_cast<VideoDecodeH264ProfileInfoKHR const *>( &rhs ) )
    {
    }

    VideoDecodeH264ProfileInfoKHR & operator=( VideoDecodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpretation as above).
    VideoDecodeH264ProfileInfoKHR & operator=( VkVideoDecodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeH264ProfileInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileInfoKHR & setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
    {
      stdProfileIdc = stdProfileIdc_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileInfoKHR & setPictureLayout( VideoDecodeH264PictureLayoutFlagBitsKHR pictureLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      pictureLayout = pictureLayout_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by const/non-const reference and pointer.
    operator VkVideoDecodeH264ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH264ProfileInfoKHR *>( this );
    }

    operator VkVideoDecodeH264ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH264ProfileInfoKHR *>( this );
    }

    operator VkVideoDecodeH264ProfileInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoDecodeH264ProfileInfoKHR *>( this );
    }

    operator VkVideoDecodeH264ProfileInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeH264ProfileInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, enabling reflection-style member access.
    std::tuple<StructureType const &, const void * const &, StdVideoH264ProfileIdc const &, VideoDecodeH264PictureLayoutFlagBitsKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stdProfileIdc, pictureLayout );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written member-wise ordering: stdProfileIdc is an external video-std type for which
    // no comparison operators are assumed, so it is compared byte-wise with memcmp.
    std::strong_ordering operator<=>( VideoDecodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = pictureLayout <=> rhs.pictureLayout; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    // Equality also compares the video-std member byte-wise, mirroring operator<=> above.
    bool operator==( VideoDecodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 ) &&
             ( pictureLayout == rhs.pictureLayout );
    }

    bool operator!=( VideoDecodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Members mirror VkVideoDecodeH264ProfileInfoKHR field-for-field; this layout match is what
    // makes the reinterpret_cast conversions above valid.
    StructureType                           sType         = StructureType::eVideoDecodeH264ProfileInfoKHR;
    const void *                            pNext         = {};
    StdVideoH264ProfileIdc                  stdProfileIdc = {};
    VideoDecodeH264PictureLayoutFlagBitsKHR pictureLayout = VideoDecodeH264PictureLayoutFlagBitsKHR::eProgressive;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkVideoDecodeH264ProfileInfoKHR to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkVideoDecodeH264ProfileInfoKHR>
  {
    using Type = VideoDecodeH264ProfileInfoKHR;
  };
#endif

  // Maps StructureType::eVideoDecodeH264ProfileInfoKHR back to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH264ProfileInfoKHR>
  {
    using Type = VideoDecodeH264ProfileInfoKHR;
  };

  // wrapper struct for struct VkVideoDecodeH264SessionParametersAddInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH264SessionParametersAddInfoKHR.html
  struct VideoDecodeH264SessionParametersAddInfoKHR
  {
    using NativeType = VkVideoDecodeH264SessionParametersAddInfoKHR;

    // Compile-time metadata: the matching StructureType value, and whether this struct
    // may appear more than once in a structure chain (it may not).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeH264SessionParametersAddInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor taking raw count/pointer pairs for the SPS and PPS arrays.
    VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoKHR( uint32_t                                 stdSPSCount_ = {},
                                                                     const StdVideoH264SequenceParameterSet * pStdSPSs_    = {},
                                                                     uint32_t                                 stdPPSCount_ = {},
                                                                     const StdVideoH264PictureParameterSet *  pStdPPSs_    = {},
                                                                     const void *                             pNext_       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stdSPSCount{ stdSPSCount_ }
      , pStdSPSs{ pStdSPSs_ }
      , stdPPSCount{ stdPPSCount_ }
      , pStdPPSs{ pStdPPSs_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoKHR( VideoDecodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper having the exact same layout.
    VideoDecodeH264SessionParametersAddInfoKHR( VkVideoDecodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH264SessionParametersAddInfoKHR( *reinterpret_cast<VideoDecodeH264SessionParametersAddInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives each count from the corresponding ArrayProxy,
    // so count and pointer cannot get out of sync. The proxies must outlive this struct.
    VideoDecodeH264SessionParametersAddInfoKHR( ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & stdSPSs_,
                                                ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const &  stdPPSs_ = {},
                                                const void *                                                            pNext_   = nullptr )
      : pNext( pNext_ )
      , stdSPSCount( static_cast<uint32_t>( stdSPSs_.size() ) )
      , pStdSPSs( stdSPSs_.data() )
      , stdPPSCount( static_cast<uint32_t>( stdPPSs_.size() ) )
      , pStdPPSs( stdPPSs_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VideoDecodeH264SessionParametersAddInfoKHR & operator=( VideoDecodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpretation as above).
    VideoDecodeH264SessionParametersAddInfoKHR & operator=( VkVideoDecodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeH264SessionParametersAddInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT
    {
      stdSPSCount = stdSPSCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setPStdSPSs( const StdVideoH264SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdSPSs = pStdSPSs_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets count and pointer together from one ArrayProxy.
    VideoDecodeH264SessionParametersAddInfoKHR &
      setStdSPSs( ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT
    {
      stdSPSCount = static_cast<uint32_t>( stdSPSs_.size() );
      pStdSPSs    = stdSPSs_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT
    {
      stdPPSCount = stdPPSCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setPStdPPSs( const StdVideoH264PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdPPSs = pStdPPSs_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets count and pointer together from one ArrayProxy.
    VideoDecodeH264SessionParametersAddInfoKHR &
      setStdPPSs( ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT
    {
      stdPPSCount = static_cast<uint32_t>( stdPPSs_.size() );
      pStdPPSs    = stdPPSs_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by const/non-const reference and pointer.
    operator VkVideoDecodeH264SessionParametersAddInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH264SessionParametersAddInfoKHR *>( this );
    }

    operator VkVideoDecodeH264SessionParametersAddInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH264SessionParametersAddInfoKHR *>( this );
    }

    operator VkVideoDecodeH264SessionParametersAddInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoDecodeH264SessionParametersAddInfoKHR *>( this );
    }

    operator VkVideoDecodeH264SessionParametersAddInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeH264SessionParametersAddInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, enabling reflection-style member access.
    std::tuple<StructureType const &,
               const void * const &,
               uint32_t const &,
               const StdVideoH264SequenceParameterSet * const &,
               uint32_t const &,
               const StdVideoH264PictureParameterSet * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeH264SessionParametersAddInfoKHR const & ) const = default;
#else
    // Note: pointer members are compared by address, not by pointed-to contents.
    bool operator==( VideoDecodeH264SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stdSPSCount == rhs.stdSPSCount ) && ( pStdSPSs == rhs.pStdSPSs ) &&
             ( stdPPSCount == rhs.stdPPSCount ) && ( pStdPPSs == rhs.pStdPPSs );
#  endif
    }

    bool operator!=( VideoDecodeH264SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkVideoDecodeH264SessionParametersAddInfoKHR field-for-field; this layout
    // match is what makes the reinterpret_cast conversions above valid.
    StructureType                            sType       = StructureType::eVideoDecodeH264SessionParametersAddInfoKHR;
    const void *                             pNext       = {};
    uint32_t                                 stdSPSCount = {};
    const StdVideoH264SequenceParameterSet * pStdSPSs    = {};
    uint32_t                                 stdPPSCount = {};
    const StdVideoH264PictureParameterSet *  pStdPPSs    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkVideoDecodeH264SessionParametersAddInfoKHR to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkVideoDecodeH264SessionParametersAddInfoKHR>
  {
    using Type = VideoDecodeH264SessionParametersAddInfoKHR;
  };
#endif

  // Maps StructureType::eVideoDecodeH264SessionParametersAddInfoKHR back to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH264SessionParametersAddInfoKHR>
  {
    using Type = VideoDecodeH264SessionParametersAddInfoKHR;
  };

  // wrapper struct for struct VkVideoDecodeH264SessionParametersCreateInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH264SessionParametersCreateInfoKHR.html
  struct VideoDecodeH264SessionParametersCreateInfoKHR
  {
    using NativeType = VkVideoDecodeH264SessionParametersCreateInfoKHR;

    // Compile-time metadata: the matching StructureType value, and whether this struct
    // may appear more than once in a structure chain (it may not).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeH264SessionParametersCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer below, only the payload
    // members and the pNext chain pointer are settable.
    VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersCreateInfoKHR( uint32_t                                           maxStdSPSCount_     = {},
                                                                        uint32_t                                           maxStdPPSCount_     = {},
                                                                        const VideoDecodeH264SessionParametersAddInfoKHR * pParametersAddInfo_ = {},
                                                                        const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxStdSPSCount{ maxStdSPSCount_ }
      , maxStdPPSCount{ maxStdPPSCount_ }
      , pParametersAddInfo{ pParametersAddInfo_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      VideoDecodeH264SessionParametersCreateInfoKHR( VideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper having the exact same layout.
    VideoDecodeH264SessionParametersCreateInfoKHR( VkVideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH264SessionParametersCreateInfoKHR( *reinterpret_cast<VideoDecodeH264SessionParametersCreateInfoKHR const *>( &rhs ) )
    {
    }

    VideoDecodeH264SessionParametersCreateInfoKHR & operator=( VideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpretation as above).
    VideoDecodeH264SessionParametersCreateInfoKHR & operator=( VkVideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeH264SessionParametersCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoKHR & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxStdSPSCount = maxStdSPSCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoKHR & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxStdPPSCount = maxStdPPSCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoKHR &
      setPParametersAddInfo( const VideoDecodeH264SessionParametersAddInfoKHR * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pParametersAddInfo = pParametersAddInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by const/non-const reference and pointer.
    operator VkVideoDecodeH264SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH264SessionParametersCreateInfoKHR *>( this );
    }

    operator VkVideoDecodeH264SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH264SessionParametersCreateInfoKHR *>( this );
    }

    operator VkVideoDecodeH264SessionParametersCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoDecodeH264SessionParametersCreateInfoKHR *>( this );
    }

    operator VkVideoDecodeH264SessionParametersCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeH264SessionParametersCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, enabling reflection-style member access.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const VideoDecodeH264SessionParametersAddInfoKHR * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeH264SessionParametersCreateInfoKHR const & ) const = default;
#else
    // Note: pParametersAddInfo is compared by address, not by pointed-to contents.
    bool operator==( VideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxStdSPSCount == rhs.maxStdSPSCount ) && ( maxStdPPSCount == rhs.maxStdPPSCount ) &&
             ( pParametersAddInfo == rhs.pParametersAddInfo );
#  endif
    }

    bool operator!=( VideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkVideoDecodeH264SessionParametersCreateInfoKHR field-for-field; this layout
    // match is what makes the reinterpret_cast conversions above valid.
    StructureType                                      sType              = StructureType::eVideoDecodeH264SessionParametersCreateInfoKHR;
    const void *                                       pNext              = {};
    uint32_t                                           maxStdSPSCount     = {};
    uint32_t                                           maxStdPPSCount     = {};
    const VideoDecodeH264SessionParametersAddInfoKHR * pParametersAddInfo = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkVideoDecodeH264SessionParametersCreateInfoKHR to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkVideoDecodeH264SessionParametersCreateInfoKHR>
  {
    using Type = VideoDecodeH264SessionParametersCreateInfoKHR;
  };
#endif

  // Maps StructureType::eVideoDecodeH264SessionParametersCreateInfoKHR back to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH264SessionParametersCreateInfoKHR>
  {
    using Type = VideoDecodeH264SessionParametersCreateInfoKHR;
  };

  // wrapper struct for struct VkVideoDecodeH265CapabilitiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH265CapabilitiesKHR.html
  struct VideoDecodeH265CapabilitiesKHR
  {
    using NativeType = VkVideoDecodeH265CapabilitiesKHR;

    // Compile-time metadata: the matching StructureType value, and whether this struct
    // may appear more than once in a structure chain (it may not).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeH265CapabilitiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; note pNext is a non-const void* here, and no setters are generated
    // for this struct (it is a capabilities/output struct, typically written by the implementation).
    VULKAN_HPP_CONSTEXPR VideoDecodeH265CapabilitiesKHR( StdVideoH265LevelIdc maxLevelIdc_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxLevelIdc{ maxLevelIdc_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoDecodeH265CapabilitiesKHR( VideoDecodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper having the exact same layout.
    VideoDecodeH265CapabilitiesKHR( VkVideoDecodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH265CapabilitiesKHR( *reinterpret_cast<VideoDecodeH265CapabilitiesKHR const *>( &rhs ) )
    {
    }

    VideoDecodeH265CapabilitiesKHR & operator=( VideoDecodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpretation as above).
    VideoDecodeH265CapabilitiesKHR & operator=( VkVideoDecodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeH265CapabilitiesKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type, by const/non-const reference and pointer.
    operator VkVideoDecodeH265CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH265CapabilitiesKHR *>( this );
    }

    operator VkVideoDecodeH265CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH265CapabilitiesKHR *>( this );
    }

    operator VkVideoDecodeH265CapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoDecodeH265CapabilitiesKHR *>( this );
    }

    operator VkVideoDecodeH265CapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeH265CapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, enabling reflection-style member access.
    std::tuple<StructureType const &, void * const &, StdVideoH265LevelIdc const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxLevelIdc );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written member-wise ordering: maxLevelIdc is an external video-std type for which
    // no comparison operators are assumed, so it is compared byte-wise with memcmp.
    std::strong_ordering operator<=>( VideoDecodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#endif

    // Equality also compares the video-std member byte-wise, mirroring operator<=> above.
    bool operator==( VideoDecodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ) == 0 );
    }

    bool operator!=( VideoDecodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Members mirror VkVideoDecodeH265CapabilitiesKHR field-for-field; this layout match is
    // what makes the reinterpret_cast conversions above valid.
    StructureType        sType       = StructureType::eVideoDecodeH265CapabilitiesKHR;
    void *               pNext       = {};
    StdVideoH265LevelIdc maxLevelIdc = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkVideoDecodeH265CapabilitiesKHR to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkVideoDecodeH265CapabilitiesKHR>
  {
    using Type = VideoDecodeH265CapabilitiesKHR;
  };
#endif

  // Maps StructureType::eVideoDecodeH265CapabilitiesKHR back to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH265CapabilitiesKHR>
  {
    using Type = VideoDecodeH265CapabilitiesKHR;
  };

  // wrapper struct for struct VkVideoDecodeH265DpbSlotInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH265DpbSlotInfoKHR.html
  struct VideoDecodeH265DpbSlotInfoKHR
  {
    using NativeType = VkVideoDecodeH265DpbSlotInfoKHR;

    // Compile-time metadata: the matching StructureType value, and whether this struct
    // may appear more than once in a structure chain (it may not).
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeH265DpbSlotInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer below, only the payload
    // member and the pNext chain pointer are settable.
    VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoKHR( const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ = {},
                                                        const void *                            pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pStdReferenceInfo{ pStdReferenceInfo_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoKHR( VideoDecodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper having the exact same layout.
    VideoDecodeH265DpbSlotInfoKHR( VkVideoDecodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH265DpbSlotInfoKHR( *reinterpret_cast<VideoDecodeH265DpbSlotInfoKHR const *>( &rhs ) )
    {
    }

    VideoDecodeH265DpbSlotInfoKHR & operator=( VideoDecodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility reinterpretation as above).
    VideoDecodeH265DpbSlotInfoKHR & operator=( VkVideoDecodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeH265DpbSlotInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265DpbSlotInfoKHR &
      setPStdReferenceInfo( const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdReferenceInfo = pStdReferenceInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by const/non-const reference and pointer.
    operator VkVideoDecodeH265DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH265DpbSlotInfoKHR *>( this );
    }

    operator VkVideoDecodeH265DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH265DpbSlotInfoKHR *>( this );
    }

    operator VkVideoDecodeH265DpbSlotInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoDecodeH265DpbSlotInfoKHR *>( this );
    }

    operator VkVideoDecodeH265DpbSlotInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeH265DpbSlotInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, enabling reflection-style member access.
    std::tuple<StructureType const &, const void * const &, const StdVideoDecodeH265ReferenceInfo * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pStdReferenceInfo );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeH265DpbSlotInfoKHR const & ) const = default;
#else
    // Note: pStdReferenceInfo is compared by address, not by pointed-to contents.
    bool operator==( VideoDecodeH265DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo );
#  endif
    }

    bool operator!=( VideoDecodeH265DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkVideoDecodeH265DpbSlotInfoKHR field-for-field; this layout match is
    // what makes the reinterpret_cast conversions above valid.
    StructureType                           sType             = StructureType::eVideoDecodeH265DpbSlotInfoKHR;
    const void *                            pNext             = {};
    const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkVideoDecodeH265DpbSlotInfoKHR to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkVideoDecodeH265DpbSlotInfoKHR>
  {
    using Type = VideoDecodeH265DpbSlotInfoKHR;
  };
#endif

  // Maps StructureType::eVideoDecodeH265DpbSlotInfoKHR back to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH265DpbSlotInfoKHR>
  {
    using Type = VideoDecodeH265DpbSlotInfoKHR;
  };

  // wrapper struct for struct VkVideoDecodeH265InlineSessionParametersInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH265InlineSessionParametersInfoKHR.html
  struct VideoDecodeH265InlineSessionParametersInfoKHR
  {
    using NativeType = VkVideoDecodeH265InlineSessionParametersInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeH265InlineSessionParametersInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoDecodeH265InlineSessionParametersInfoKHR( const StdVideoH265VideoParameterSet *    pStdVPS_ = {},
                                                                        const StdVideoH265SequenceParameterSet * pStdSPS_ = {},
                                                                        const StdVideoH265PictureParameterSet *  pStdPPS_ = {},
                                                                        const void *                             pNext_   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pStdVPS{ pStdVPS_ }
      , pStdSPS{ pStdSPS_ }
      , pStdPPS{ pStdPPS_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      VideoDecodeH265InlineSessionParametersInfoKHR( VideoDecodeH265InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH265InlineSessionParametersInfoKHR( VkVideoDecodeH265InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH265InlineSessionParametersInfoKHR( *reinterpret_cast<VideoDecodeH265InlineSessionParametersInfoKHR const *>( &rhs ) )
    {
    }

    VideoDecodeH265InlineSessionParametersInfoKHR & operator=( VideoDecodeH265InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    VideoDecodeH265InlineSessionParametersInfoKHR & operator=( VkVideoDecodeH265InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeH265InlineSessionParametersInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265InlineSessionParametersInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265InlineSessionParametersInfoKHR & setPStdVPS( const StdVideoH265VideoParameterSet * pStdVPS_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdVPS = pStdVPS_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265InlineSessionParametersInfoKHR & setPStdSPS( const StdVideoH265SequenceParameterSet * pStdSPS_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdSPS = pStdSPS_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265InlineSessionParametersInfoKHR & setPStdPPS( const StdVideoH265PictureParameterSet * pStdPPS_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdPPS = pStdPPS_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkVideoDecodeH265InlineSessionParametersInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH265InlineSessionParametersInfoKHR *>( this );
    }

    operator VkVideoDecodeH265InlineSessionParametersInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH265InlineSessionParametersInfoKHR *>( this );
    }

    operator VkVideoDecodeH265InlineSessionParametersInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoDecodeH265InlineSessionParametersInfoKHR *>( this );
    }

    operator VkVideoDecodeH265InlineSessionParametersInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeH265InlineSessionParametersInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &,
               const void * const &,
               const StdVideoH265VideoParameterSet * const &,
               const StdVideoH265SequenceParameterSet * const &,
               const StdVideoH265PictureParameterSet * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pStdVPS, pStdSPS, pStdPPS );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeH265InlineSessionParametersInfoKHR const & ) const = default;
#else
    bool operator==( VideoDecodeH265InlineSessionParametersInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdVPS == rhs.pStdVPS ) && ( pStdSPS == rhs.pStdSPS ) && ( pStdPPS == rhs.pStdPPS );
#  endif
    }

    bool operator!=( VideoDecodeH265InlineSessionParametersInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                            sType   = StructureType::eVideoDecodeH265InlineSessionParametersInfoKHR;
    const void *                             pNext   = {};
    const StdVideoH265VideoParameterSet *    pStdVPS = {};
    const StdVideoH265SequenceParameterSet * pStdSPS = {};
    const StdVideoH265PictureParameterSet *  pStdPPS = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkVideoDecodeH265InlineSessionParametersInfoKHR>
  {
    using Type = VideoDecodeH265InlineSessionParametersInfoKHR;
  };
#endif

  // Trait mapping the sType enumerant to its C++ wrapper type (used e.g. by structure-chain utilities).
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH265InlineSessionParametersInfoKHR>
  {
    using Type = VideoDecodeH265InlineSessionParametersInfoKHR;
  };

  // wrapper struct for struct VkVideoDecodeH265PictureInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH265PictureInfoKHR.html
  struct VideoDecodeH265PictureInfoKHR
  {
    using NativeType = VkVideoDecodeH265PictureInfoKHR;

    // This structure may appear at most once in any pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeH265PictureInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Field-wise constructor; sType is fixed by the in-class member initializer below.
    VULKAN_HPP_CONSTEXPR VideoDecodeH265PictureInfoKHR( const StdVideoDecodeH265PictureInfo * pStdPictureInfo_      = {},
                                                        uint32_t                              sliceSegmentCount_    = {},
                                                        const uint32_t *                      pSliceSegmentOffsets_ = {},
                                                        const void *                          pNext_                = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pStdPictureInfo{ pStdPictureInfo_ }
      , sliceSegmentCount{ sliceSegmentCount_ }
      , pSliceSegmentOffsets{ pSliceSegmentOffsets_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoDecodeH265PictureInfoKHR( VideoDecodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native C struct; safe because this wrapper is layout-compatible with it.
    VideoDecodeH265PictureInfoKHR( VkVideoDecodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH265PictureInfoKHR( *reinterpret_cast<VideoDecodeH265PictureInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives sliceSegmentCount/pSliceSegmentOffsets from an array proxy.
    // The proxy only borrows the caller's storage, which must outlive this struct's use.
    VideoDecodeH265PictureInfoKHR( const StdVideoDecodeH265PictureInfo *           pStdPictureInfo_,
                                   ArrayProxyNoTemporaries<const uint32_t> const & sliceSegmentOffsets_,
                                   const void *                                    pNext_ = nullptr )
      : pNext( pNext_ )
      , pStdPictureInfo( pStdPictureInfo_ )
      , sliceSegmentCount( static_cast<uint32_t>( sliceSegmentOffsets_.size() ) )
      , pSliceSegmentOffsets( sliceSegmentOffsets_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VideoDecodeH265PictureInfoKHR & operator=( VideoDecodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (layout-compatible reinterpret).
    VideoDecodeH265PictureInfoKHR & operator=( VkVideoDecodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeH265PictureInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoKHR & setPStdPictureInfo( const StdVideoDecodeH265PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdPictureInfo = pStdPictureInfo_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoKHR & setSliceSegmentCount( uint32_t sliceSegmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      sliceSegmentCount = sliceSegmentCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoKHR & setPSliceSegmentOffsets( const uint32_t * pSliceSegmentOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      pSliceSegmentOffsets = pSliceSegmentOffsets_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both count and pointer from an array proxy in one call.
    VideoDecodeH265PictureInfoKHR & setSliceSegmentOffsets( ArrayProxyNoTemporaries<const uint32_t> const & sliceSegmentOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      sliceSegmentCount    = static_cast<uint32_t>( sliceSegmentOffsets_.size() );
      pSliceSegmentOffsets = sliceSegmentOffsets_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer;
    // valid because the wrapper and VkVideoDecodeH265PictureInfoKHR share the same layout.
    operator VkVideoDecodeH265PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH265PictureInfoKHR *>( this );
    }

    operator VkVideoDecodeH265PictureInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH265PictureInfoKHR *>( this );
    }

    operator VkVideoDecodeH265PictureInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoDecodeH265PictureInfoKHR *>( this );
    }

    operator VkVideoDecodeH265PictureInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeH265PictureInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order, for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, const StdVideoDecodeH265PictureInfo * const &, uint32_t const &, const uint32_t * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pStdPictureInfo, sliceSegmentCount, pSliceSegmentOffsets );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeH265PictureInfoKHR const & ) const = default;
#else
    // Member-wise equality; pointers are compared by address, not by pointee contents.
    bool operator==( VideoDecodeH265PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && ( sliceSegmentCount == rhs.sliceSegmentCount ) &&
             ( pSliceSegmentOffsets == rhs.pSliceSegmentOffsets );
#  endif
    }

    bool operator!=( VideoDecodeH265PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types must exactly mirror VkVideoDecodeH265PictureInfoKHR;
    // the reinterpret_cast conversions above depend on this layout.
    StructureType                         sType                = StructureType::eVideoDecodeH265PictureInfoKHR;
    const void *                          pNext                = {};
    const StdVideoDecodeH265PictureInfo * pStdPictureInfo      = {};
    uint32_t                              sliceSegmentCount    = {};
    const uint32_t *                      pSliceSegmentOffsets = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkVideoDecodeH265PictureInfoKHR>
  {
    using Type = VideoDecodeH265PictureInfoKHR;
  };
#endif

  // Trait mapping the sType enumerant to its C++ wrapper type (used e.g. by structure-chain utilities).
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH265PictureInfoKHR>
  {
    using Type = VideoDecodeH265PictureInfoKHR;
  };

  // wrapper struct for struct VkVideoDecodeH265ProfileInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH265ProfileInfoKHR.html
  struct VideoDecodeH265ProfileInfoKHR
  {
    using NativeType = VkVideoDecodeH265ProfileInfoKHR;

    // This structure may appear at most once in any pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeH265ProfileInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Field-wise constructor; sType is fixed by the in-class member initializer below.
    VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileInfoKHR( StdVideoH265ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stdProfileIdc{ stdProfileIdc_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileInfoKHR( VideoDecodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native C struct; safe because this wrapper is layout-compatible with it.
    VideoDecodeH265ProfileInfoKHR( VkVideoDecodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH265ProfileInfoKHR( *reinterpret_cast<VideoDecodeH265ProfileInfoKHR const *>( &rhs ) )
    {
    }

    VideoDecodeH265ProfileInfoKHR & operator=( VideoDecodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (layout-compatible reinterpret).
    VideoDecodeH265ProfileInfoKHR & operator=( VkVideoDecodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeH265ProfileInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileInfoKHR & setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
    {
      stdProfileIdc = stdProfileIdc_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer;
    // valid because the wrapper and VkVideoDecodeH265ProfileInfoKHR share the same layout.
    operator VkVideoDecodeH265ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH265ProfileInfoKHR *>( this );
    }

    operator VkVideoDecodeH265ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH265ProfileInfoKHR *>( this );
    }

    operator VkVideoDecodeH265ProfileInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoDecodeH265ProfileInfoKHR *>( this );
    }

    operator VkVideoDecodeH265ProfileInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeH265ProfileInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order, for reflection-based comparison.
    std::tuple<StructureType const &, const void * const &, StdVideoH265ProfileIdc const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stdProfileIdc );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written three-way comparison: StdVideoH265ProfileIdc comes from the external
    // vulkan_video headers and provides no comparison operators, so it is compared bytewise
    // with memcmp. Because <=> is user-provided, operator== below must be provided explicitly too.
    std::strong_ordering operator<=>( VideoDecodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#endif

    // Member-wise equality; stdProfileIdc is compared bytewise (see operator<=> above).
    bool operator==( VideoDecodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 );
    }

    bool operator!=( VideoDecodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Member order and types must exactly mirror VkVideoDecodeH265ProfileInfoKHR;
    // the reinterpret_cast conversions above depend on this layout.
    StructureType          sType         = StructureType::eVideoDecodeH265ProfileInfoKHR;
    const void *           pNext         = {};
    StdVideoH265ProfileIdc stdProfileIdc = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkVideoDecodeH265ProfileInfoKHR>
  {
    using Type = VideoDecodeH265ProfileInfoKHR;
  };
#endif

  // Trait mapping the sType enumerant to its C++ wrapper type (used e.g. by structure-chain utilities).
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH265ProfileInfoKHR>
  {
    using Type = VideoDecodeH265ProfileInfoKHR;
  };

  // wrapper struct for struct VkVideoDecodeH265SessionParametersAddInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH265SessionParametersAddInfoKHR.html
  struct VideoDecodeH265SessionParametersAddInfoKHR
  {
    using NativeType = VkVideoDecodeH265SessionParametersAddInfoKHR;

    // This structure may appear at most once in any pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeH265SessionParametersAddInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Field-wise constructor; sType is fixed by the in-class member initializer below.
    VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoKHR( uint32_t                                 stdVPSCount_ = {},
                                                                     const StdVideoH265VideoParameterSet *    pStdVPSs_    = {},
                                                                     uint32_t                                 stdSPSCount_ = {},
                                                                     const StdVideoH265SequenceParameterSet * pStdSPSs_    = {},
                                                                     uint32_t                                 stdPPSCount_ = {},
                                                                     const StdVideoH265PictureParameterSet *  pStdPPSs_    = {},
                                                                     const void *                             pNext_       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stdVPSCount{ stdVPSCount_ }
      , pStdVPSs{ pStdVPSs_ }
      , stdSPSCount{ stdSPSCount_ }
      , pStdSPSs{ pStdSPSs_ }
      , stdPPSCount{ stdPPSCount_ }
      , pStdPPSs{ pStdPPSs_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoKHR( VideoDecodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native C struct; safe because this wrapper is layout-compatible with it.
    VideoDecodeH265SessionParametersAddInfoKHR( VkVideoDecodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH265SessionParametersAddInfoKHR( *reinterpret_cast<VideoDecodeH265SessionParametersAddInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives each count/pointer pair from an array proxy.
    // The proxies only borrow the caller's storage, which must outlive this struct's use.
    VideoDecodeH265SessionParametersAddInfoKHR( ArrayProxyNoTemporaries<const StdVideoH265VideoParameterSet> const &    stdVPSs_,
                                                ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & stdSPSs_ = {},
                                                ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const &  stdPPSs_ = {},
                                                const void *                                                            pNext_   = nullptr )
      : pNext( pNext_ )
      , stdVPSCount( static_cast<uint32_t>( stdVPSs_.size() ) )
      , pStdVPSs( stdVPSs_.data() )
      , stdSPSCount( static_cast<uint32_t>( stdSPSs_.size() ) )
      , pStdSPSs( stdSPSs_.data() )
      , stdPPSCount( static_cast<uint32_t>( stdPPSs_.size() ) )
      , pStdPPSs( stdPPSs_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VideoDecodeH265SessionParametersAddInfoKHR & operator=( VideoDecodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (layout-compatible reinterpret).
    VideoDecodeH265SessionParametersAddInfoKHR & operator=( VkVideoDecodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeH265SessionParametersAddInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setStdVPSCount( uint32_t stdVPSCount_ ) VULKAN_HPP_NOEXCEPT
    {
      stdVPSCount = stdVPSCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setPStdVPSs( const StdVideoH265VideoParameterSet * pStdVPSs_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdVPSs = pStdVPSs_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both stdVPSCount and pStdVPSs from an array proxy in one call.
    VideoDecodeH265SessionParametersAddInfoKHR & setStdVPSs( ArrayProxyNoTemporaries<const StdVideoH265VideoParameterSet> const & stdVPSs_ ) VULKAN_HPP_NOEXCEPT
    {
      stdVPSCount = static_cast<uint32_t>( stdVPSs_.size() );
      pStdVPSs    = stdVPSs_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT
    {
      stdSPSCount = stdSPSCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setPStdSPSs( const StdVideoH265SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdSPSs = pStdSPSs_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both stdSPSCount and pStdSPSs from an array proxy in one call.
    VideoDecodeH265SessionParametersAddInfoKHR &
      setStdSPSs( ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT
    {
      stdSPSCount = static_cast<uint32_t>( stdSPSs_.size() );
      pStdSPSs    = stdSPSs_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT
    {
      stdPPSCount = stdPPSCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setPStdPPSs( const StdVideoH265PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdPPSs = pStdPPSs_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both stdPPSCount and pStdPPSs from an array proxy in one call.
    VideoDecodeH265SessionParametersAddInfoKHR &
      setStdPPSs( ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT
    {
      stdPPSCount = static_cast<uint32_t>( stdPPSs_.size() );
      pStdPPSs    = stdPPSs_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer;
    // valid because the wrapper and VkVideoDecodeH265SessionParametersAddInfoKHR share the same layout.
    operator VkVideoDecodeH265SessionParametersAddInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH265SessionParametersAddInfoKHR *>( this );
    }

    operator VkVideoDecodeH265SessionParametersAddInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH265SessionParametersAddInfoKHR *>( this );
    }

    operator VkVideoDecodeH265SessionParametersAddInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoDecodeH265SessionParametersAddInfoKHR *>( this );
    }

    operator VkVideoDecodeH265SessionParametersAddInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeH265SessionParametersAddInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order, for reflection-based comparison.
    std::tuple<StructureType const &,
               const void * const &,
               uint32_t const &,
               const StdVideoH265VideoParameterSet * const &,
               uint32_t const &,
               const StdVideoH265SequenceParameterSet * const &,
               uint32_t const &,
               const StdVideoH265PictureParameterSet * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stdVPSCount, pStdVPSs, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeH265SessionParametersAddInfoKHR const & ) const = default;
#else
    // Member-wise equality; pointers are compared by address, not by pointee contents.
    bool operator==( VideoDecodeH265SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stdVPSCount == rhs.stdVPSCount ) && ( pStdVPSs == rhs.pStdVPSs ) &&
             ( stdSPSCount == rhs.stdSPSCount ) && ( pStdSPSs == rhs.pStdSPSs ) && ( stdPPSCount == rhs.stdPPSCount ) && ( pStdPPSs == rhs.pStdPPSs );
#  endif
    }

    bool operator!=( VideoDecodeH265SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types must exactly mirror VkVideoDecodeH265SessionParametersAddInfoKHR;
    // the reinterpret_cast conversions above depend on this layout.
    StructureType                            sType       = StructureType::eVideoDecodeH265SessionParametersAddInfoKHR;
    const void *                             pNext       = {};
    uint32_t                                 stdVPSCount = {};
    const StdVideoH265VideoParameterSet *    pStdVPSs    = {};
    uint32_t                                 stdSPSCount = {};
    const StdVideoH265SequenceParameterSet * pStdSPSs    = {};
    uint32_t                                 stdPPSCount = {};
    const StdVideoH265PictureParameterSet *  pStdPPSs    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkVideoDecodeH265SessionParametersAddInfoKHR>
  {
    using Type = VideoDecodeH265SessionParametersAddInfoKHR;
  };
#endif

  // Trait mapping the sType enumerant to its C++ wrapper type (used e.g. by structure-chain utilities).
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH265SessionParametersAddInfoKHR>
  {
    using Type = VideoDecodeH265SessionParametersAddInfoKHR;
  };

  // wrapper struct for struct VkVideoDecodeH265SessionParametersCreateInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH265SessionParametersCreateInfoKHR.html
  struct VideoDecodeH265SessionParametersCreateInfoKHR
  {
    using NativeType = VkVideoDecodeH265SessionParametersCreateInfoKHR;

    // This structure may appear at most once in any pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeH265SessionParametersCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Field-wise constructor; sType is fixed by the in-class member initializer below.
    VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersCreateInfoKHR( uint32_t                                           maxStdVPSCount_     = {},
                                                                        uint32_t                                           maxStdSPSCount_     = {},
                                                                        uint32_t                                           maxStdPPSCount_     = {},
                                                                        const VideoDecodeH265SessionParametersAddInfoKHR * pParametersAddInfo_ = {},
                                                                        const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxStdVPSCount{ maxStdVPSCount_ }
      , maxStdSPSCount{ maxStdSPSCount_ }
      , maxStdPPSCount{ maxStdPPSCount_ }
      , pParametersAddInfo{ pParametersAddInfo_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      VideoDecodeH265SessionParametersCreateInfoKHR( VideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the native C struct; safe because this wrapper is layout-compatible with it.
    VideoDecodeH265SessionParametersCreateInfoKHR( VkVideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH265SessionParametersCreateInfoKHR( *reinterpret_cast<VideoDecodeH265SessionParametersCreateInfoKHR const *>( &rhs ) )
    {
    }

    VideoDecodeH265SessionParametersCreateInfoKHR & operator=( VideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the native C struct (layout-compatible reinterpret).
    VideoDecodeH265SessionParametersCreateInfoKHR & operator=( VkVideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeH265SessionParametersCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each assigns one member and returns *this.
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setMaxStdVPSCount( uint32_t maxStdVPSCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxStdVPSCount = maxStdVPSCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxStdSPSCount = maxStdSPSCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxStdPPSCount = maxStdPPSCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR &
      setPParametersAddInfo( const VideoDecodeH265SessionParametersAddInfoKHR * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pParametersAddInfo = pParametersAddInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer;
    // valid because the wrapper and VkVideoDecodeH265SessionParametersCreateInfoKHR share the same layout.
    operator VkVideoDecodeH265SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH265SessionParametersCreateInfoKHR *>( this );
    }

    operator VkVideoDecodeH265SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH265SessionParametersCreateInfoKHR *>( this );
    }

    operator VkVideoDecodeH265SessionParametersCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoDecodeH265SessionParametersCreateInfoKHR *>( this );
    }

    operator VkVideoDecodeH265SessionParametersCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeH265SessionParametersCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order, for reflection-based comparison.
    std::tuple<StructureType const &,
               const void * const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               const VideoDecodeH265SessionParametersAddInfoKHR * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxStdVPSCount, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeH265SessionParametersCreateInfoKHR const & ) const = default;
#else
    // Member-wise equality; pParametersAddInfo is compared by address, not by pointee contents.
    bool operator==( VideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxStdVPSCount == rhs.maxStdVPSCount ) && ( maxStdSPSCount == rhs.maxStdSPSCount ) &&
             ( maxStdPPSCount == rhs.maxStdPPSCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo );
#  endif
    }

    bool operator!=( VideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types must exactly mirror VkVideoDecodeH265SessionParametersCreateInfoKHR;
    // the reinterpret_cast conversions above depend on this layout.
    StructureType                                      sType              = StructureType::eVideoDecodeH265SessionParametersCreateInfoKHR;
    const void *                                       pNext              = {};
    uint32_t                                           maxStdVPSCount     = {};
    uint32_t                                           maxStdSPSCount     = {};
    uint32_t                                           maxStdPPSCount     = {};
    const VideoDecodeH265SessionParametersAddInfoKHR * pParametersAddInfo = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct to its C++ wrapper type (C++20 and later only).
  template <>
  struct CppType<VkVideoDecodeH265SessionParametersCreateInfoKHR>
  {
    using Type = VideoDecodeH265SessionParametersCreateInfoKHR;
  };
#endif

  // Trait mapping the sType enumerant to its C++ wrapper type (used e.g. by structure-chain utilities).
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH265SessionParametersCreateInfoKHR>
  {
    using Type = VideoDecodeH265SessionParametersCreateInfoKHR;
  };

  // wrapper struct for struct VkVideoDecodeInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeInfoKHR.html
  struct VideoDecodeInfoKHR
  {
    using NativeType = VkVideoDecodeInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR( VideoDecodeFlagsKHR               flags_               = {},
                                             Buffer                            srcBuffer_           = {},
                                             DeviceSize                        srcBufferOffset_     = {},
                                             DeviceSize                        srcBufferRange_      = {},
                                             VideoPictureResourceInfoKHR       dstPictureResource_  = {},
                                             const VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ = {},
                                             uint32_t                          referenceSlotCount_  = {},
                                             const VideoReferenceSlotInfoKHR * pReferenceSlots_     = {},
                                             const void *                      pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , srcBuffer{ srcBuffer_ }
      , srcBufferOffset{ srcBufferOffset_ }
      , srcBufferRange{ srcBufferRange_ }
      , dstPictureResource{ dstPictureResource_ }
      , pSetupReferenceSlot{ pSetupReferenceSlot_ }
      , referenceSlotCount{ referenceSlotCount_ }
      , pReferenceSlots{ pReferenceSlots_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR( VideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeInfoKHR( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : VideoDecodeInfoKHR( *reinterpret_cast<VideoDecodeInfoKHR const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoDecodeInfoKHR( VideoDecodeFlagsKHR                                              flags_,
                        Buffer                                                           srcBuffer_,
                        DeviceSize                                                       srcBufferOffset_,
                        DeviceSize                                                       srcBufferRange_,
                        VideoPictureResourceInfoKHR                                      dstPictureResource_,
                        const VideoReferenceSlotInfoKHR *                                pSetupReferenceSlot_,
                        ArrayProxyNoTemporaries<const VideoReferenceSlotInfoKHR> const & referenceSlots_,
                        const void *                                                     pNext_ = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , srcBuffer( srcBuffer_ )
      , srcBufferOffset( srcBufferOffset_ )
      , srcBufferRange( srcBufferRange_ )
      , dstPictureResource( dstPictureResource_ )
      , pSetupReferenceSlot( pSetupReferenceSlot_ )
      , referenceSlotCount( static_cast<uint32_t>( referenceSlots_.size() ) )
      , pReferenceSlots( referenceSlots_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VideoDecodeInfoKHR & operator=( VideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct through the layout-compatible C++ view.
    VideoDecodeInfoKHR & operator=( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setFlags( VideoDecodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBuffer( Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      srcBuffer = srcBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBufferOffset( DeviceSize srcBufferOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      srcBufferOffset = srcBufferOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBufferRange( DeviceSize srcBufferRange_ ) VULKAN_HPP_NOEXCEPT
    {
      srcBufferRange = srcBufferRange_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setDstPictureResource( VideoPictureResourceInfoKHR const & dstPictureResource_ ) VULKAN_HPP_NOEXCEPT
    {
      dstPictureResource = dstPictureResource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setPSetupReferenceSlot( const VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT
    {
      pSetupReferenceSlot = pSetupReferenceSlot_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT
    {
      referenceSlotCount = referenceSlotCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setPReferenceSlots( const VideoReferenceSlotInfoKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT
    {
      pReferenceSlots = pReferenceSlots_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets both referenceSlotCount and pReferenceSlots
    // from a single ArrayProxy, keeping the count/pointer pair consistent.
    VideoDecodeInfoKHR & setReferenceSlots( ArrayProxyNoTemporaries<const VideoReferenceSlotInfoKHR> const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT
    {
      referenceSlotCount = static_cast<uint32_t>( referenceSlots_.size() );
      pReferenceSlots    = referenceSlots_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost views onto the native C struct (reference and pointer, const
    // and non-const); all rely on layout compatibility with VkVideoDecodeInfoKHR.
    operator VkVideoDecodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeInfoKHR *>( this );
    }

    operator VkVideoDecodeInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeInfoKHR *>( this );
    }

    operator VkVideoDecodeInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoDecodeInfoKHR *>( this );
    }

    operator VkVideoDecodeInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to every member (in declaration
    // order); operator== below uses it for memberwise comparison when
    // VULKAN_HPP_USE_REFLECT is enabled.
    std::tuple<StructureType const &,
               const void * const &,
               VideoDecodeFlagsKHR const &,
               Buffer const &,
               DeviceSize const &,
               DeviceSize const &,
               VideoPictureResourceInfoKHR const &,
               const VideoReferenceSlotInfoKHR * const &,
               uint32_t const &,
               const VideoReferenceSlotInfoKHR * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(
        sType, pNext, flags, srcBuffer, srcBufferOffset, srcBufferRange, dstPictureResource, pSetupReferenceSlot, referenceSlotCount, pReferenceSlots );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeInfoKHR const & ) const = default;
#else
    // Memberwise equality. Note: pointer members (pNext, pSetupReferenceSlot,
    // pReferenceSlots) are compared by address, not by pointee contents.
    bool operator==( VideoDecodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( srcBuffer == rhs.srcBuffer ) &&
             ( srcBufferOffset == rhs.srcBufferOffset ) && ( srcBufferRange == rhs.srcBufferRange ) && ( dstPictureResource == rhs.dstPictureResource ) &&
             ( pSetupReferenceSlot == rhs.pSetupReferenceSlot ) && ( referenceSlotCount == rhs.referenceSlotCount ) &&
             ( pReferenceSlots == rhs.pReferenceSlots );
#  endif
    }

    bool operator!=( VideoDecodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkVideoDecodeInfoKHR exactly; the
    // reinterpret_cast-based conversions above depend on this layout.
    StructureType                     sType               = StructureType::eVideoDecodeInfoKHR;
    const void *                      pNext               = {};
    VideoDecodeFlagsKHR               flags               = {};
    Buffer                            srcBuffer           = {};
    DeviceSize                        srcBufferOffset     = {};
    DeviceSize                        srcBufferRange      = {};
    VideoPictureResourceInfoKHR       dstPictureResource  = {};
    const VideoReferenceSlotInfoKHR * pSetupReferenceSlot = {};
    uint32_t                          referenceSlotCount  = {};
    const VideoReferenceSlotInfoKHR * pReferenceSlots     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkVideoDecodeInfoKHR>
  {
    using Type = VideoDecodeInfoKHR;
  };
#endif

  // Maps the StructureType enumerator back to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeInfoKHR>
  {
    using Type = VideoDecodeInfoKHR;
  };

  // wrapper struct for struct VkVideoDecodeUsageInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeUsageInfoKHR.html
  struct VideoDecodeUsageInfoKHR
  {
    using NativeType = VkVideoDecodeUsageInfoKHR;

    // Chain metadata: this sType may appear at most once in a pNext chain
    // (allowDuplicate == false); presumably consumed by the StructureChain
    // helpers defined elsewhere in vulkan.hpp.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeUsageInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoDecodeUsageInfoKHR( VideoDecodeUsageFlagsKHR videoUsageHints_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , videoUsageHints{ videoUsageHints_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoDecodeUsageInfoKHR( VideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on layout compatibility with
    // VkVideoDecodeUsageInfoKHR.
    VideoDecodeUsageInfoKHR( VkVideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeUsageInfoKHR( *reinterpret_cast<VideoDecodeUsageInfoKHR const *>( &rhs ) )
    {
    }

    VideoDecodeUsageInfoKHR & operator=( VideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct through the layout-compatible C++ view.
    VideoDecodeUsageInfoKHR & operator=( VkVideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeUsageInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeUsageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeUsageInfoKHR & setVideoUsageHints( VideoDecodeUsageFlagsKHR videoUsageHints_ ) VULKAN_HPP_NOEXCEPT
    {
      videoUsageHints = videoUsageHints_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost views onto the native C struct.
    operator VkVideoDecodeUsageInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeUsageInfoKHR *>( this );
    }

    operator VkVideoDecodeUsageInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeUsageInfoKHR *>( this );
    }

    operator VkVideoDecodeUsageInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoDecodeUsageInfoKHR *>( this );
    }

    operator VkVideoDecodeUsageInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeUsageInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, used for reflective comparison.
    std::tuple<StructureType const &, const void * const &, VideoDecodeUsageFlagsKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, videoUsageHints );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeUsageInfoKHR const & ) const = default;
#else
    // Memberwise equality; pNext is compared by address, not by chain contents.
    bool operator==( VideoDecodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoUsageHints == rhs.videoUsageHints );
#  endif
    }

    bool operator!=( VideoDecodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkVideoDecodeUsageInfoKHR exactly.
    StructureType            sType           = StructureType::eVideoDecodeUsageInfoKHR;
    const void *             pNext           = {};
    VideoDecodeUsageFlagsKHR videoUsageHints = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkVideoDecodeUsageInfoKHR>
  {
    using Type = VideoDecodeUsageInfoKHR;
  };
#endif

  // Maps the StructureType enumerator back to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeUsageInfoKHR>
  {
    using Type = VideoDecodeUsageInfoKHR;
  };

  // wrapper struct for struct VkVideoDecodeVP9CapabilitiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeVP9CapabilitiesKHR.html
  struct VideoDecodeVP9CapabilitiesKHR
  {
    using NativeType = VkVideoDecodeVP9CapabilitiesKHR;

    // Chain metadata: this sType may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeVp9CapabilitiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoDecodeVP9CapabilitiesKHR( StdVideoVP9Level maxLevel_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxLevel{ maxLevel_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoDecodeVP9CapabilitiesKHR( VideoDecodeVP9CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on layout compatibility with
    // VkVideoDecodeVP9CapabilitiesKHR.
    VideoDecodeVP9CapabilitiesKHR( VkVideoDecodeVP9CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeVP9CapabilitiesKHR( *reinterpret_cast<VideoDecodeVP9CapabilitiesKHR const *>( &rhs ) )
    {
    }

    VideoDecodeVP9CapabilitiesKHR & operator=( VideoDecodeVP9CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct through the layout-compatible C++ view.
    // Note: no setters are generated for this structure.
    VideoDecodeVP9CapabilitiesKHR & operator=( VkVideoDecodeVP9CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeVP9CapabilitiesKHR const *>( &rhs );
      return *this;
    }

    // Zero-cost views onto the native C struct.
    operator VkVideoDecodeVP9CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeVP9CapabilitiesKHR *>( this );
    }

    operator VkVideoDecodeVP9CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeVP9CapabilitiesKHR *>( this );
    }

    operator VkVideoDecodeVP9CapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoDecodeVP9CapabilitiesKHR *>( this );
    }

    operator VkVideoDecodeVP9CapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeVP9CapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, used for reflective comparison.
    std::tuple<StructureType const &, void * const &, StdVideoVP9Level const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxLevel );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written three-way comparison: maxLevel is an external video-std
    // type without comparison operators, so it is compared bytewise via
    // memcmp and the result is mapped onto std::strong_ordering.
    std::strong_ordering operator<=>( VideoDecodeVP9CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoVP9Level ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#endif

    // Memberwise equality; maxLevel is compared bytewise (see above), pNext by
    // address.
    bool operator==( VideoDecodeVP9CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoVP9Level ) ) == 0 );
    }

    bool operator!=( VideoDecodeVP9CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Member order and types mirror VkVideoDecodeVP9CapabilitiesKHR exactly.
    StructureType    sType    = StructureType::eVideoDecodeVp9CapabilitiesKHR;
    void *           pNext    = {};
    StdVideoVP9Level maxLevel = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkVideoDecodeVP9CapabilitiesKHR>
  {
    using Type = VideoDecodeVP9CapabilitiesKHR;
  };
#endif

  // Maps the StructureType enumerator back to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeVp9CapabilitiesKHR>
  {
    using Type = VideoDecodeVP9CapabilitiesKHR;
  };

  // wrapper struct for struct VkVideoDecodeVP9PictureInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeVP9PictureInfoKHR.html
  struct VideoDecodeVP9PictureInfoKHR
  {
    using NativeType = VkVideoDecodeVP9PictureInfoKHR;

    // Chain metadata: this sType may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeVp9PictureInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // CONSTEXPR_14 (not plain CONSTEXPR) — presumably because copying the
    // std::array member requires C++14 relaxed constexpr.
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeVP9PictureInfoKHR( const StdVideoDecodeVP9PictureInfo *                                   pStdPictureInfo_          = {},
                                                          std::array<int32_t, VK_MAX_VIDEO_VP9_REFERENCES_PER_FRAME_KHR> const & referenceNameSlotIndices_ = {},
                                                          uint32_t                                                               uncompressedHeaderOffset_ = {},
                                                          uint32_t                                                               compressedHeaderOffset_   = {},
                                                          uint32_t                                                               tilesOffset_              = {},
                                                          const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pStdPictureInfo{ pStdPictureInfo_ }
      , referenceNameSlotIndices{ referenceNameSlotIndices_ }
      , uncompressedHeaderOffset{ uncompressedHeaderOffset_ }
      , compressedHeaderOffset{ compressedHeaderOffset_ }
      , tilesOffset{ tilesOffset_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeVP9PictureInfoKHR( VideoDecodeVP9PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on layout compatibility with
    // VkVideoDecodeVP9PictureInfoKHR.
    VideoDecodeVP9PictureInfoKHR( VkVideoDecodeVP9PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeVP9PictureInfoKHR( *reinterpret_cast<VideoDecodeVP9PictureInfoKHR const *>( &rhs ) )
    {
    }

    VideoDecodeVP9PictureInfoKHR & operator=( VideoDecodeVP9PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct through the layout-compatible C++ view.
    VideoDecodeVP9PictureInfoKHR & operator=( VkVideoDecodeVP9PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeVP9PictureInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeVP9PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeVP9PictureInfoKHR & setPStdPictureInfo( const StdVideoDecodeVP9PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdPictureInfo = pStdPictureInfo_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeVP9PictureInfoKHR &
      setReferenceNameSlotIndices( std::array<int32_t, VK_MAX_VIDEO_VP9_REFERENCES_PER_FRAME_KHR> referenceNameSlotIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      referenceNameSlotIndices = referenceNameSlotIndices_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeVP9PictureInfoKHR & setUncompressedHeaderOffset( uint32_t uncompressedHeaderOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      uncompressedHeaderOffset = uncompressedHeaderOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeVP9PictureInfoKHR & setCompressedHeaderOffset( uint32_t compressedHeaderOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      compressedHeaderOffset = compressedHeaderOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeVP9PictureInfoKHR & setTilesOffset( uint32_t tilesOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      tilesOffset = tilesOffset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost views onto the native C struct.
    operator VkVideoDecodeVP9PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeVP9PictureInfoKHR *>( this );
    }

    operator VkVideoDecodeVP9PictureInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeVP9PictureInfoKHR *>( this );
    }

    operator VkVideoDecodeVP9PictureInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoDecodeVP9PictureInfoKHR *>( this );
    }

    operator VkVideoDecodeVP9PictureInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeVP9PictureInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, used for reflective comparison.
    std::tuple<StructureType const &,
               const void * const &,
               const StdVideoDecodeVP9PictureInfo * const &,
               ArrayWrapper1D<int32_t, VK_MAX_VIDEO_VP9_REFERENCES_PER_FRAME_KHR> const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pStdPictureInfo, referenceNameSlotIndices, uncompressedHeaderOffset, compressedHeaderOffset, tilesOffset );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeVP9PictureInfoKHR const & ) const = default;
#else
    // Memberwise equality; pNext and pStdPictureInfo are compared by address,
    // not by pointee contents.
    bool operator==( VideoDecodeVP9PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) &&
             ( referenceNameSlotIndices == rhs.referenceNameSlotIndices ) && ( uncompressedHeaderOffset == rhs.uncompressedHeaderOffset ) &&
             ( compressedHeaderOffset == rhs.compressedHeaderOffset ) && ( tilesOffset == rhs.tilesOffset );
#  endif
    }

    bool operator!=( VideoDecodeVP9PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types mirror VkVideoDecodeVP9PictureInfoKHR exactly;
    // ArrayWrapper1D stands in for the C fixed-size array member.
    StructureType                                                      sType                    = StructureType::eVideoDecodeVp9PictureInfoKHR;
    const void *                                                       pNext                    = {};
    const StdVideoDecodeVP9PictureInfo *                               pStdPictureInfo          = {};
    ArrayWrapper1D<int32_t, VK_MAX_VIDEO_VP9_REFERENCES_PER_FRAME_KHR> referenceNameSlotIndices = {};
    uint32_t                                                           uncompressedHeaderOffset = {};
    uint32_t                                                           compressedHeaderOffset   = {};
    uint32_t                                                           tilesOffset              = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkVideoDecodeVP9PictureInfoKHR>
  {
    using Type = VideoDecodeVP9PictureInfoKHR;
  };
#endif

  // Maps the StructureType enumerator back to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeVp9PictureInfoKHR>
  {
    using Type = VideoDecodeVP9PictureInfoKHR;
  };

  // wrapper struct for struct VkVideoDecodeVP9ProfileInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeVP9ProfileInfoKHR.html
  struct VideoDecodeVP9ProfileInfoKHR
  {
    using NativeType = VkVideoDecodeVP9ProfileInfoKHR;

    // Chain metadata: this sType may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoDecodeVp9ProfileInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoDecodeVP9ProfileInfoKHR( StdVideoVP9Profile stdProfile_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stdProfile{ stdProfile_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoDecodeVP9ProfileInfoKHR( VideoDecodeVP9ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on layout compatibility with
    // VkVideoDecodeVP9ProfileInfoKHR.
    VideoDecodeVP9ProfileInfoKHR( VkVideoDecodeVP9ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeVP9ProfileInfoKHR( *reinterpret_cast<VideoDecodeVP9ProfileInfoKHR const *>( &rhs ) )
    {
    }

    VideoDecodeVP9ProfileInfoKHR & operator=( VideoDecodeVP9ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct through the layout-compatible C++ view.
    VideoDecodeVP9ProfileInfoKHR & operator=( VkVideoDecodeVP9ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeVP9ProfileInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeVP9ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeVP9ProfileInfoKHR & setStdProfile( StdVideoVP9Profile stdProfile_ ) VULKAN_HPP_NOEXCEPT
    {
      stdProfile = stdProfile_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost views onto the native C struct.
    operator VkVideoDecodeVP9ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeVP9ProfileInfoKHR *>( this );
    }

    operator VkVideoDecodeVP9ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeVP9ProfileInfoKHR *>( this );
    }

    operator VkVideoDecodeVP9ProfileInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoDecodeVP9ProfileInfoKHR *>( this );
    }

    operator VkVideoDecodeVP9ProfileInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoDecodeVP9ProfileInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, used for reflective comparison.
    std::tuple<StructureType const &, const void * const &, StdVideoVP9Profile const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stdProfile );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written three-way comparison: stdProfile is an external video-std
    // type without comparison operators, so it is compared bytewise via
    // memcmp and the result is mapped onto std::strong_ordering.
    std::strong_ordering operator<=>( VideoDecodeVP9ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = memcmp( &stdProfile, &rhs.stdProfile, sizeof( StdVideoVP9Profile ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#endif

    // Memberwise equality; stdProfile is compared bytewise, pNext by address.
    bool operator==( VideoDecodeVP9ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfile, &rhs.stdProfile, sizeof( StdVideoVP9Profile ) ) == 0 );
    }

    bool operator!=( VideoDecodeVP9ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Member order and types mirror VkVideoDecodeVP9ProfileInfoKHR exactly.
    StructureType      sType      = StructureType::eVideoDecodeVp9ProfileInfoKHR;
    const void *       pNext      = {};
    StdVideoVP9Profile stdProfile = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkVideoDecodeVP9ProfileInfoKHR>
  {
    using Type = VideoDecodeVP9ProfileInfoKHR;
  };
#endif

  // Maps the StructureType enumerator back to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeVp9ProfileInfoKHR>
  {
    using Type = VideoDecodeVP9ProfileInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeAV1CapabilitiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1CapabilitiesKHR.html
  struct VideoEncodeAV1CapabilitiesKHR
  {
    using NativeType = VkVideoEncodeAV1CapabilitiesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeAv1CapabilitiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeAV1CapabilitiesKHR( VideoEncodeAV1CapabilityFlagsKHR     flags_                                         = {},
                                                        StdVideoAV1Level                     maxLevel_                                      = {},
                                                        Extent2D                             codedPictureAlignment_                         = {},
                                                        Extent2D                             maxTiles_                                      = {},
                                                        Extent2D                             minTileSize_                                   = {},
                                                        Extent2D                             maxTileSize_                                   = {},
                                                        VideoEncodeAV1SuperblockSizeFlagsKHR superblockSizes_                               = {},
                                                        uint32_t                             maxSingleReferenceCount_                       = {},
                                                        uint32_t                             singleReferenceNameMask_                       = {},
                                                        uint32_t                             maxUnidirectionalCompoundReferenceCount_       = {},
                                                        uint32_t                             maxUnidirectionalCompoundGroup1ReferenceCount_ = {},
                                                        uint32_t                             unidirectionalCompoundReferenceNameMask_       = {},
                                                        uint32_t                             maxBidirectionalCompoundReferenceCount_        = {},
                                                        uint32_t                             maxBidirectionalCompoundGroup1ReferenceCount_  = {},
                                                        uint32_t                             maxBidirectionalCompoundGroup2ReferenceCount_  = {},
                                                        uint32_t                             bidirectionalCompoundReferenceNameMask_        = {},
                                                        uint32_t                             maxTemporalLayerCount_                         = {},
                                                        uint32_t                             maxSpatialLayerCount_                          = {},
                                                        uint32_t                             maxOperatingPoints_                            = {},
                                                        uint32_t                             minQIndex_                                     = {},
                                                        uint32_t                             maxQIndex_                                     = {},
                                                        Bool32                               prefersGopRemainingFrames_                     = {},
                                                        Bool32                               requiresGopRemainingFrames_                    = {},
                                                        VideoEncodeAV1StdFlagsKHR            stdSyntaxFlags_                                = {},
                                                        void *                               pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , maxLevel{ maxLevel_ }
      , codedPictureAlignment{ codedPictureAlignment_ }
      , maxTiles{ maxTiles_ }
      , minTileSize{ minTileSize_ }
      , maxTileSize{ maxTileSize_ }
      , superblockSizes{ superblockSizes_ }
      , maxSingleReferenceCount{ maxSingleReferenceCount_ }
      , singleReferenceNameMask{ singleReferenceNameMask_ }
      , maxUnidirectionalCompoundReferenceCount{ maxUnidirectionalCompoundReferenceCount_ }
      , maxUnidirectionalCompoundGroup1ReferenceCount{ maxUnidirectionalCompoundGroup1ReferenceCount_ }
      , unidirectionalCompoundReferenceNameMask{ unidirectionalCompoundReferenceNameMask_ }
      , maxBidirectionalCompoundReferenceCount{ maxBidirectionalCompoundReferenceCount_ }
      , maxBidirectionalCompoundGroup1ReferenceCount{ maxBidirectionalCompoundGroup1ReferenceCount_ }
      , maxBidirectionalCompoundGroup2ReferenceCount{ maxBidirectionalCompoundGroup2ReferenceCount_ }
      , bidirectionalCompoundReferenceNameMask{ bidirectionalCompoundReferenceNameMask_ }
      , maxTemporalLayerCount{ maxTemporalLayerCount_ }
      , maxSpatialLayerCount{ maxSpatialLayerCount_ }
      , maxOperatingPoints{ maxOperatingPoints_ }
      , minQIndex{ minQIndex_ }
      , maxQIndex{ maxQIndex_ }
      , prefersGopRemainingFrames{ prefersGopRemainingFrames_ }
      , requiresGopRemainingFrames{ requiresGopRemainingFrames_ }
      , stdSyntaxFlags{ stdSyntaxFlags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeAV1CapabilitiesKHR( VideoEncodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeAV1CapabilitiesKHR( VkVideoEncodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeAV1CapabilitiesKHR( *reinterpret_cast<VideoEncodeAV1CapabilitiesKHR const *>( &rhs ) )
    {
    }

    VideoEncodeAV1CapabilitiesKHR & operator=( VideoEncodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    VideoEncodeAV1CapabilitiesKHR & operator=( VkVideoEncodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeAV1CapabilitiesKHR const *>( &rhs );
      return *this;
    }

    operator VkVideoEncodeAV1CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeAV1CapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeAV1CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeAV1CapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeAV1CapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeAV1CapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeAV1CapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeAV1CapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of const references to every member, in declaration order.
    std::tuple<StructureType const &,
               void * const &,
               VideoEncodeAV1CapabilityFlagsKHR const &,
               StdVideoAV1Level const &,
               Extent2D const &,
               Extent2D const &,
               Extent2D const &,
               Extent2D const &,
               VideoEncodeAV1SuperblockSizeFlagsKHR const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               Bool32 const &,
               Bool32 const &,
               VideoEncodeAV1StdFlagsKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       flags,
                       maxLevel,
                       codedPictureAlignment,
                       maxTiles,
                       minTileSize,
                       maxTileSize,
                       superblockSizes,
                       maxSingleReferenceCount,
                       singleReferenceNameMask,
                       maxUnidirectionalCompoundReferenceCount,
                       maxUnidirectionalCompoundGroup1ReferenceCount,
                       unidirectionalCompoundReferenceNameMask,
                       maxBidirectionalCompoundReferenceCount,
                       maxBidirectionalCompoundGroup1ReferenceCount,
                       maxBidirectionalCompoundGroup2ReferenceCount,
                       bidirectionalCompoundReferenceNameMask,
                       maxTemporalLayerCount,
                       maxSpatialLayerCount,
                       maxOperatingPoints,
                       minQIndex,
                       maxQIndex,
                       prefersGopRemainingFrames,
                       requiresGopRemainingFrames,
                       stdSyntaxFlags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Lexicographic member-wise ordering in declaration order.  maxLevel is an
    // external video-std type without its own <=>, so it is compared bytewise via
    // memcmp and the int result is mapped onto std::strong_ordering.
    std::strong_ordering operator<=>( VideoEncodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
        return cmp;
      if ( auto cmp = memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = codedPictureAlignment <=> rhs.codedPictureAlignment; cmp != 0 )
        return cmp;
      if ( auto cmp = maxTiles <=> rhs.maxTiles; cmp != 0 )
        return cmp;
      if ( auto cmp = minTileSize <=> rhs.minTileSize; cmp != 0 )
        return cmp;
      if ( auto cmp = maxTileSize <=> rhs.maxTileSize; cmp != 0 )
        return cmp;
      if ( auto cmp = superblockSizes <=> rhs.superblockSizes; cmp != 0 )
        return cmp;
      if ( auto cmp = maxSingleReferenceCount <=> rhs.maxSingleReferenceCount; cmp != 0 )
        return cmp;
      if ( auto cmp = singleReferenceNameMask <=> rhs.singleReferenceNameMask; cmp != 0 )
        return cmp;
      if ( auto cmp = maxUnidirectionalCompoundReferenceCount <=> rhs.maxUnidirectionalCompoundReferenceCount; cmp != 0 )
        return cmp;
      if ( auto cmp = maxUnidirectionalCompoundGroup1ReferenceCount <=> rhs.maxUnidirectionalCompoundGroup1ReferenceCount; cmp != 0 )
        return cmp;
      if ( auto cmp = unidirectionalCompoundReferenceNameMask <=> rhs.unidirectionalCompoundReferenceNameMask; cmp != 0 )
        return cmp;
      if ( auto cmp = maxBidirectionalCompoundReferenceCount <=> rhs.maxBidirectionalCompoundReferenceCount; cmp != 0 )
        return cmp;
      if ( auto cmp = maxBidirectionalCompoundGroup1ReferenceCount <=> rhs.maxBidirectionalCompoundGroup1ReferenceCount; cmp != 0 )
        return cmp;
      if ( auto cmp = maxBidirectionalCompoundGroup2ReferenceCount <=> rhs.maxBidirectionalCompoundGroup2ReferenceCount; cmp != 0 )
        return cmp;
      if ( auto cmp = bidirectionalCompoundReferenceNameMask <=> rhs.bidirectionalCompoundReferenceNameMask; cmp != 0 )
        return cmp;
      if ( auto cmp = maxTemporalLayerCount <=> rhs.maxTemporalLayerCount; cmp != 0 )
        return cmp;
      if ( auto cmp = maxSpatialLayerCount <=> rhs.maxSpatialLayerCount; cmp != 0 )
        return cmp;
      if ( auto cmp = maxOperatingPoints <=> rhs.maxOperatingPoints; cmp != 0 )
        return cmp;
      if ( auto cmp = minQIndex <=> rhs.minQIndex; cmp != 0 )
        return cmp;
      if ( auto cmp = maxQIndex <=> rhs.maxQIndex; cmp != 0 )
        return cmp;
      if ( auto cmp = prefersGopRemainingFrames <=> rhs.prefersGopRemainingFrames; cmp != 0 )
        return cmp;
      if ( auto cmp = requiresGopRemainingFrames <=> rhs.requiresGopRemainingFrames; cmp != 0 )
        return cmp;
      if ( auto cmp = stdSyntaxFlags <=> rhs.stdSyntaxFlags; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    // Member-wise equality; maxLevel (external video-std type) is compared
    // bytewise with memcmp.  pNext is compared as a pointer, not deep-compared.
    bool operator==( VideoEncodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ) == 0 ) && ( codedPictureAlignment == rhs.codedPictureAlignment ) &&
             ( maxTiles == rhs.maxTiles ) && ( minTileSize == rhs.minTileSize ) && ( maxTileSize == rhs.maxTileSize ) &&
             ( superblockSizes == rhs.superblockSizes ) && ( maxSingleReferenceCount == rhs.maxSingleReferenceCount ) &&
             ( singleReferenceNameMask == rhs.singleReferenceNameMask ) &&
             ( maxUnidirectionalCompoundReferenceCount == rhs.maxUnidirectionalCompoundReferenceCount ) &&
             ( maxUnidirectionalCompoundGroup1ReferenceCount == rhs.maxUnidirectionalCompoundGroup1ReferenceCount ) &&
             ( unidirectionalCompoundReferenceNameMask == rhs.unidirectionalCompoundReferenceNameMask ) &&
             ( maxBidirectionalCompoundReferenceCount == rhs.maxBidirectionalCompoundReferenceCount ) &&
             ( maxBidirectionalCompoundGroup1ReferenceCount == rhs.maxBidirectionalCompoundGroup1ReferenceCount ) &&
             ( maxBidirectionalCompoundGroup2ReferenceCount == rhs.maxBidirectionalCompoundGroup2ReferenceCount ) &&
             ( bidirectionalCompoundReferenceNameMask == rhs.bidirectionalCompoundReferenceNameMask ) &&
             ( maxTemporalLayerCount == rhs.maxTemporalLayerCount ) && ( maxSpatialLayerCount == rhs.maxSpatialLayerCount ) &&
             ( maxOperatingPoints == rhs.maxOperatingPoints ) && ( minQIndex == rhs.minQIndex ) && ( maxQIndex == rhs.maxQIndex ) &&
             ( prefersGopRemainingFrames == rhs.prefersGopRemainingFrames ) && ( requiresGopRemainingFrames == rhs.requiresGopRemainingFrames ) &&
             ( stdSyntaxFlags == rhs.stdSyntaxFlags );
    }

    // Defined in terms of operator==.
    bool operator!=( VideoEncodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    StructureType                        sType                                         = StructureType::eVideoEncodeAv1CapabilitiesKHR;
    void *                               pNext                                         = {};
    VideoEncodeAV1CapabilityFlagsKHR     flags                                         = {};
    StdVideoAV1Level                     maxLevel                                      = {};
    Extent2D                             codedPictureAlignment                         = {};
    Extent2D                             maxTiles                                      = {};
    Extent2D                             minTileSize                                   = {};
    Extent2D                             maxTileSize                                   = {};
    VideoEncodeAV1SuperblockSizeFlagsKHR superblockSizes                               = {};
    uint32_t                             maxSingleReferenceCount                       = {};
    uint32_t                             singleReferenceNameMask                       = {};
    uint32_t                             maxUnidirectionalCompoundReferenceCount       = {};
    uint32_t                             maxUnidirectionalCompoundGroup1ReferenceCount = {};
    uint32_t                             unidirectionalCompoundReferenceNameMask       = {};
    uint32_t                             maxBidirectionalCompoundReferenceCount        = {};
    uint32_t                             maxBidirectionalCompoundGroup1ReferenceCount  = {};
    uint32_t                             maxBidirectionalCompoundGroup2ReferenceCount  = {};
    uint32_t                             bidirectionalCompoundReferenceNameMask        = {};
    uint32_t                             maxTemporalLayerCount                         = {};
    uint32_t                             maxSpatialLayerCount                          = {};
    uint32_t                             maxOperatingPoints                            = {};
    uint32_t                             minQIndex                                     = {};
    uint32_t                             maxQIndex                                     = {};
    Bool32                               prefersGopRemainingFrames                     = {};
    Bool32                               requiresGopRemainingFrames                    = {};
    VideoEncodeAV1StdFlagsKHR            stdSyntaxFlags                                = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C struct to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkVideoEncodeAV1CapabilitiesKHR>
  {
    using Type = VideoEncodeAV1CapabilitiesKHR;
  };
#endif

  // Trait mapping the sType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeAv1CapabilitiesKHR>
  {
    using Type = VideoEncodeAV1CapabilitiesKHR;
  };

  // wrapper struct for struct VkVideoEncodeAV1DpbSlotInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1DpbSlotInfoKHR.html
  struct VideoEncodeAV1DpbSlotInfoKHR
  {
    using NativeType = VkVideoEncodeAV1DpbSlotInfoKHR;

    // Compile-time sType tag; allowDuplicate marks whether this struct may occur
    // more than once in a structure chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeAv1DpbSlotInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR VideoEncodeAV1DpbSlotInfoKHR( const StdVideoEncodeAV1ReferenceInfo * pStdReferenceInfo_ = {},
                                                       const void *                           pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pStdReferenceInfo{ pStdReferenceInfo_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeAV1DpbSlotInfoKHR( VideoEncodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility between this
    // wrapper and VkVideoEncodeAV1DpbSlotInfoKHR.
    VideoEncodeAV1DpbSlotInfoKHR( VkVideoEncodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeAV1DpbSlotInfoKHR( *reinterpret_cast<VideoEncodeAV1DpbSlotInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeAV1DpbSlotInfoKHR & operator=( VideoEncodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility assumption).
    VideoEncodeAV1DpbSlotInfoKHR & operator=( VkVideoEncodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeAV1DpbSlotInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, each returning *this.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1DpbSlotInfoKHR & setPStdReferenceInfo( const StdVideoEncodeAV1ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdReferenceInfo = pStdReferenceInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the underlying C struct, by reference and by pointer.
    operator VkVideoEncodeAV1DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeAV1DpbSlotInfoKHR *>( this );
    }

    operator VkVideoEncodeAV1DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeAV1DpbSlotInfoKHR *>( this );
    }

    operator VkVideoEncodeAV1DpbSlotInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeAV1DpbSlotInfoKHR *>( this );
    }

    operator VkVideoEncodeAV1DpbSlotInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeAV1DpbSlotInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, const StdVideoEncodeAV1ReferenceInfo * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pStdReferenceInfo );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeAV1DpbSlotInfoKHR const & ) const = default;
#else
    // Member-wise equality; compares the pStdReferenceInfo pointer, not the pointee.
    bool operator==( VideoEncodeAV1DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo );
#  endif
    }

    bool operator!=( VideoEncodeAV1DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                          sType             = StructureType::eVideoEncodeAv1DpbSlotInfoKHR;
    const void *                           pNext             = {};
    const StdVideoEncodeAV1ReferenceInfo * pStdReferenceInfo = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C struct to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkVideoEncodeAV1DpbSlotInfoKHR>
  {
    using Type = VideoEncodeAV1DpbSlotInfoKHR;
  };
#endif

  // Trait mapping the sType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeAv1DpbSlotInfoKHR>
  {
    using Type = VideoEncodeAV1DpbSlotInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeAV1FrameSizeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1FrameSizeKHR.html
  struct VideoEncodeAV1FrameSizeKHR
  {
    using NativeType = VkVideoEncodeAV1FrameSizeKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; plain data struct (no sType/pNext chain members).
    VULKAN_HPP_CONSTEXPR
      VideoEncodeAV1FrameSizeKHR( uint32_t intraFrameSize_ = {}, uint32_t predictiveFrameSize_ = {}, uint32_t bipredictiveFrameSize_ = {} ) VULKAN_HPP_NOEXCEPT
      : intraFrameSize{ intraFrameSize_ }
      , predictiveFrameSize{ predictiveFrameSize_ }
      , bipredictiveFrameSize{ bipredictiveFrameSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeAV1FrameSizeKHR( VideoEncodeAV1FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility between this
    // wrapper and VkVideoEncodeAV1FrameSizeKHR.
    VideoEncodeAV1FrameSizeKHR( VkVideoEncodeAV1FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeAV1FrameSizeKHR( *reinterpret_cast<VideoEncodeAV1FrameSizeKHR const *>( &rhs ) )
    {
    }

    VideoEncodeAV1FrameSizeKHR & operator=( VideoEncodeAV1FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility assumption).
    VideoEncodeAV1FrameSizeKHR & operator=( VkVideoEncodeAV1FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeAV1FrameSizeKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, each returning *this.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1FrameSizeKHR & setIntraFrameSize( uint32_t intraFrameSize_ ) VULKAN_HPP_NOEXCEPT
    {
      intraFrameSize = intraFrameSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1FrameSizeKHR & setPredictiveFrameSize( uint32_t predictiveFrameSize_ ) VULKAN_HPP_NOEXCEPT
    {
      predictiveFrameSize = predictiveFrameSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1FrameSizeKHR & setBipredictiveFrameSize( uint32_t bipredictiveFrameSize_ ) VULKAN_HPP_NOEXCEPT
    {
      bipredictiveFrameSize = bipredictiveFrameSize_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the underlying C struct, by reference and by pointer.
    operator VkVideoEncodeAV1FrameSizeKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeAV1FrameSizeKHR *>( this );
    }

    operator VkVideoEncodeAV1FrameSizeKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeAV1FrameSizeKHR *>( this );
    }

    operator VkVideoEncodeAV1FrameSizeKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeAV1FrameSizeKHR *>( this );
    }

    operator VkVideoEncodeAV1FrameSizeKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeAV1FrameSizeKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( intraFrameSize, predictiveFrameSize, bipredictiveFrameSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeAV1FrameSizeKHR const & ) const = default;
#else
    // Member-wise equality over the three frame-size fields.
    bool operator==( VideoEncodeAV1FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( intraFrameSize == rhs.intraFrameSize ) && ( predictiveFrameSize == rhs.predictiveFrameSize ) &&
             ( bipredictiveFrameSize == rhs.bipredictiveFrameSize );
#  endif
    }

    bool operator!=( VideoEncodeAV1FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t intraFrameSize        = {};
    uint32_t predictiveFrameSize   = {};
    uint32_t bipredictiveFrameSize = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C struct to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkVideoEncodeAV1FrameSizeKHR>
  {
    using Type = VideoEncodeAV1FrameSizeKHR;
  };
#endif

  // wrapper struct for struct VkVideoEncodeAV1GopRemainingFrameInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1GopRemainingFrameInfoKHR.html
  struct VideoEncodeAV1GopRemainingFrameInfoKHR
  {
    using NativeType = VkVideoEncodeAV1GopRemainingFrameInfoKHR;

    // Compile-time sType tag; allowDuplicate marks whether this struct may occur
    // more than once in a structure chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeAv1GopRemainingFrameInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer below.
    VULKAN_HPP_CONSTEXPR VideoEncodeAV1GopRemainingFrameInfoKHR( Bool32       useGopRemainingFrames_    = {},
                                                                 uint32_t     gopRemainingIntra_        = {},
                                                                 uint32_t     gopRemainingPredictive_   = {},
                                                                 uint32_t     gopRemainingBipredictive_ = {},
                                                                 const void * pNext_                    = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , useGopRemainingFrames{ useGopRemainingFrames_ }
      , gopRemainingIntra{ gopRemainingIntra_ }
      , gopRemainingPredictive{ gopRemainingPredictive_ }
      , gopRemainingBipredictive{ gopRemainingBipredictive_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeAV1GopRemainingFrameInfoKHR( VideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility between this
    // wrapper and VkVideoEncodeAV1GopRemainingFrameInfoKHR.
    VideoEncodeAV1GopRemainingFrameInfoKHR( VkVideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeAV1GopRemainingFrameInfoKHR( *reinterpret_cast<VideoEncodeAV1GopRemainingFrameInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeAV1GopRemainingFrameInfoKHR & operator=( VideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility assumption).
    VideoEncodeAV1GopRemainingFrameInfoKHR & operator=( VkVideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeAV1GopRemainingFrameInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, each returning *this.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1GopRemainingFrameInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1GopRemainingFrameInfoKHR & setUseGopRemainingFrames( Bool32 useGopRemainingFrames_ ) VULKAN_HPP_NOEXCEPT
    {
      useGopRemainingFrames = useGopRemainingFrames_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1GopRemainingFrameInfoKHR & setGopRemainingIntra( uint32_t gopRemainingIntra_ ) VULKAN_HPP_NOEXCEPT
    {
      gopRemainingIntra = gopRemainingIntra_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1GopRemainingFrameInfoKHR & setGopRemainingPredictive( uint32_t gopRemainingPredictive_ ) VULKAN_HPP_NOEXCEPT
    {
      gopRemainingPredictive = gopRemainingPredictive_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1GopRemainingFrameInfoKHR & setGopRemainingBipredictive( uint32_t gopRemainingBipredictive_ ) VULKAN_HPP_NOEXCEPT
    {
      gopRemainingBipredictive = gopRemainingBipredictive_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the underlying C struct, by reference and by pointer.
    operator VkVideoEncodeAV1GopRemainingFrameInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeAV1GopRemainingFrameInfoKHR *>( this );
    }

    operator VkVideoEncodeAV1GopRemainingFrameInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeAV1GopRemainingFrameInfoKHR *>( this );
    }

    operator VkVideoEncodeAV1GopRemainingFrameInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeAV1GopRemainingFrameInfoKHR *>( this );
    }

    operator VkVideoEncodeAV1GopRemainingFrameInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeAV1GopRemainingFrameInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, Bool32 const &, uint32_t const &, uint32_t const &, uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, useGopRemainingFrames, gopRemainingIntra, gopRemainingPredictive, gopRemainingBipredictive );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeAV1GopRemainingFrameInfoKHR const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a pointer.
    bool operator==( VideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useGopRemainingFrames == rhs.useGopRemainingFrames ) &&
             ( gopRemainingIntra == rhs.gopRemainingIntra ) && ( gopRemainingPredictive == rhs.gopRemainingPredictive ) &&
             ( gopRemainingBipredictive == rhs.gopRemainingBipredictive );
#  endif
    }

    bool operator!=( VideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                    = StructureType::eVideoEncodeAv1GopRemainingFrameInfoKHR;
    const void *  pNext                    = {};
    Bool32        useGopRemainingFrames    = {};
    uint32_t      gopRemainingIntra        = {};
    uint32_t      gopRemainingPredictive   = {};
    uint32_t      gopRemainingBipredictive = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the C struct to its C++ wrapper type (C++20 and newer only).
  template <>
  struct CppType<VkVideoEncodeAV1GopRemainingFrameInfoKHR>
  {
    using Type = VideoEncodeAV1GopRemainingFrameInfoKHR;
  };
#endif

  // Trait mapping the sType enumerant to the wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeAv1GopRemainingFrameInfoKHR>
  {
    using Type = VideoEncodeAV1GopRemainingFrameInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeAV1PictureInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1PictureInfoKHR.html
  struct VideoEncodeAV1PictureInfoKHR
  {
    using NativeType = VkVideoEncodeAV1PictureInfoKHR;

    // Compile-time sType tag; allowDuplicate marks whether this struct may occur
    // more than once in a structure chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeAv1PictureInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; CONSTEXPR_14 because the wrapped slot-index array member
    // requires a C++14-level constexpr copy.  sType is fixed by the member
    // initializer below.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR( VideoEncodeAV1PredictionModeKHR      predictionMode_   = VideoEncodeAV1PredictionModeKHR::eIntraOnly,
                                                          VideoEncodeAV1RateControlGroupKHR    rateControlGroup_ = VideoEncodeAV1RateControlGroupKHR::eIntra,
                                                          uint32_t                             constantQIndex_   = {},
                                                          const StdVideoEncodeAV1PictureInfo * pStdPictureInfo_  = {},
                                                          std::array<int32_t, VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR> const & referenceNameSlotIndices_ = {},
                                                          Bool32                                                                 primaryReferenceCdfOnly_  = {},
                                                          Bool32       generateObuExtensionHeader_                                                         = {},
                                                          const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , predictionMode{ predictionMode_ }
      , rateControlGroup{ rateControlGroup_ }
      , constantQIndex{ constantQIndex_ }
      , pStdPictureInfo{ pStdPictureInfo_ }
      , referenceNameSlotIndices{ referenceNameSlotIndices_ }
      , primaryReferenceCdfOnly{ primaryReferenceCdfOnly_ }
      , generateObuExtensionHeader{ generateObuExtensionHeader_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR( VideoEncodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on layout compatibility between this
    // wrapper and VkVideoEncodeAV1PictureInfoKHR.
    VideoEncodeAV1PictureInfoKHR( VkVideoEncodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeAV1PictureInfoKHR( *reinterpret_cast<VideoEncodeAV1PictureInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeAV1PictureInfoKHR & operator=( VideoEncodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct (same layout-compatibility assumption).
    VideoEncodeAV1PictureInfoKHR & operator=( VkVideoEncodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeAV1PictureInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, each returning *this.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & setPredictionMode( VideoEncodeAV1PredictionModeKHR predictionMode_ ) VULKAN_HPP_NOEXCEPT
    {
      predictionMode = predictionMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & setRateControlGroup( VideoEncodeAV1RateControlGroupKHR rateControlGroup_ ) VULKAN_HPP_NOEXCEPT
    {
      rateControlGroup = rateControlGroup_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & setConstantQIndex( uint32_t constantQIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      constantQIndex = constantQIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & setPStdPictureInfo( const StdVideoEncodeAV1PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdPictureInfo = pStdPictureInfo_;
      return *this;
    }

    // Takes the std::array by value and copies it into the ArrayWrapper1D member.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR &
      setReferenceNameSlotIndices( std::array<int32_t, VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR> referenceNameSlotIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      referenceNameSlotIndices = referenceNameSlotIndices_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & setPrimaryReferenceCdfOnly( Bool32 primaryReferenceCdfOnly_ ) VULKAN_HPP_NOEXCEPT
    {
      primaryReferenceCdfOnly = primaryReferenceCdfOnly_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & setGenerateObuExtensionHeader( Bool32 generateObuExtensionHeader_ ) VULKAN_HPP_NOEXCEPT
    {
      generateObuExtensionHeader = generateObuExtensionHeader_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the underlying C struct, by reference and by pointer.
    operator VkVideoEncodeAV1PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeAV1PictureInfoKHR *>( this );
    }

    operator VkVideoEncodeAV1PictureInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeAV1PictureInfoKHR *>( this );
    }

    operator VkVideoEncodeAV1PictureInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeAV1PictureInfoKHR *>( this );
    }

    operator VkVideoEncodeAV1PictureInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeAV1PictureInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of const references to all members, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               VideoEncodeAV1PredictionModeKHR const &,
               VideoEncodeAV1RateControlGroupKHR const &,
               uint32_t const &,
               const StdVideoEncodeAV1PictureInfo * const &,
               ArrayWrapper1D<int32_t, VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR> const &,
               Bool32 const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       predictionMode,
                       rateControlGroup,
                       constantQIndex,
                       pStdPictureInfo,
                       referenceNameSlotIndices,
                       primaryReferenceCdfOnly,
                       generateObuExtensionHeader );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeAV1PictureInfoKHR const & ) const = default;
#else
    // Member-wise equality; pNext and pStdPictureInfo are compared as pointers.
    bool operator==( VideoEncodeAV1PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( predictionMode == rhs.predictionMode ) && ( rateControlGroup == rhs.rateControlGroup ) &&
             ( constantQIndex == rhs.constantQIndex ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) &&
             ( referenceNameSlotIndices == rhs.referenceNameSlotIndices ) && ( primaryReferenceCdfOnly == rhs.primaryReferenceCdfOnly ) &&
             ( generateObuExtensionHeader == rhs.generateObuExtensionHeader );
#  endif
    }

    bool operator!=( VideoEncodeAV1PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                                                      sType                      = StructureType::eVideoEncodeAv1PictureInfoKHR;
    const void *                                                       pNext                      = {};
    VideoEncodeAV1PredictionModeKHR                                    predictionMode             = VideoEncodeAV1PredictionModeKHR::eIntraOnly;
    VideoEncodeAV1RateControlGroupKHR                                  rateControlGroup           = VideoEncodeAV1RateControlGroupKHR::eIntra;
    uint32_t                                                           constantQIndex             = {};
    const StdVideoEncodeAV1PictureInfo *                               pStdPictureInfo            = {};
    ArrayWrapper1D<int32_t, VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR> referenceNameSlotIndices   = {};
    Bool32                                                             primaryReferenceCdfOnly    = {};
    Bool32                                                             generateObuExtensionHeader = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for template-based type lookups (C++20 and newer only).
  template <>
  struct CppType<VkVideoEncodeAV1PictureInfoKHR>
  {
    using Type = VideoEncodeAV1PictureInfoKHR;
  };
#endif

  // Maps StructureType::eVideoEncodeAv1PictureInfoKHR to its corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeAv1PictureInfoKHR>
  {
    using Type = VideoEncodeAV1PictureInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeAV1ProfileInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1ProfileInfoKHR.html
  struct VideoEncodeAV1ProfileInfoKHR
  {
    using NativeType = VkVideoEncodeAV1ProfileInfoKHR;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeAv1ProfileInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all members default to zero / null.
    VULKAN_HPP_CONSTEXPR VideoEncodeAV1ProfileInfoKHR( StdVideoAV1Profile stdProfile_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stdProfile{ stdProfile_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeAV1ProfileInfoKHR( VideoEncodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the wrapper mirrors its layout, so a reinterpreting copy is valid.
    VideoEncodeAV1ProfileInfoKHR( VkVideoEncodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeAV1ProfileInfoKHR( *reinterpret_cast<VideoEncodeAV1ProfileInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeAV1ProfileInfoKHR & operator=( VideoEncodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    VideoEncodeAV1ProfileInfoKHR & operator=( VkVideoEncodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeAV1ProfileInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (fluent-interface style).
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1ProfileInfoKHR & setStdProfile( StdVideoAV1Profile stdProfile_ ) VULKAN_HPP_NOEXCEPT
    {
      stdProfile = stdProfile_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type; valid because wrapper and native struct share one layout.
    operator VkVideoEncodeAV1ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeAV1ProfileInfoKHR *>( this );
    }

    operator VkVideoEncodeAV1ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeAV1ProfileInfoKHR *>( this );
    }

    operator VkVideoEncodeAV1ProfileInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeAV1ProfileInfoKHR *>( this );
    }

    operator VkVideoEncodeAV1ProfileInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeAV1ProfileInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic tooling.
    std::tuple<StructureType const &, const void * const &, StdVideoAV1Profile const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stdProfile );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written <=>: StdVideoAV1Profile (from the video std headers) provides no comparison
    // operators, so it is compared bytewise with memcmp. NOTE(review): a bytewise compare includes
    // any padding bytes in the object representation — acceptable for the generated code's purposes.
    std::strong_ordering operator<=>( VideoEncodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = memcmp( &stdProfile, &rhs.stdProfile, sizeof( StdVideoAV1Profile ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#endif

    // Equality likewise falls back to memcmp for the opaque StdVideoAV1Profile member.
    bool operator==( VideoEncodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfile, &rhs.stdProfile, sizeof( StdVideoAV1Profile ) ) == 0 );
    }

    bool operator!=( VideoEncodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    StructureType      sType      = StructureType::eVideoEncodeAv1ProfileInfoKHR;
    const void *       pNext      = {};
    StdVideoAV1Profile stdProfile = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for template-based type lookups (C++20 and newer only).
  template <>
  struct CppType<VkVideoEncodeAV1ProfileInfoKHR>
  {
    using Type = VideoEncodeAV1ProfileInfoKHR;
  };
#endif

  // Maps StructureType::eVideoEncodeAv1ProfileInfoKHR to its corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeAv1ProfileInfoKHR>
  {
    using Type = VideoEncodeAV1ProfileInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeAV1QIndexKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1QIndexKHR.html
  struct VideoEncodeAV1QIndexKHR
  {
    using NativeType = VkVideoEncodeAV1QIndexKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every quantizer index defaults to zero.
    VULKAN_HPP_CONSTEXPR
      VideoEncodeAV1QIndexKHR( uint32_t intraQIndex_ = {}, uint32_t predictiveQIndex_ = {}, uint32_t bipredictiveQIndex_ = {} ) VULKAN_HPP_NOEXCEPT
      : intraQIndex( intraQIndex_ )
      , predictiveQIndex( predictiveQIndex_ )
      , bipredictiveQIndex( bipredictiveQIndex_ )
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeAV1QIndexKHR( VideoEncodeAV1QIndexKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the wrapper mirrors its layout, so a reinterpreting copy is valid.
    VideoEncodeAV1QIndexKHR( VkVideoEncodeAV1QIndexKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeAV1QIndexKHR( *reinterpret_cast<VideoEncodeAV1QIndexKHR const *>( &rhs ) )
    {
    }

    VideoEncodeAV1QIndexKHR & operator=( VideoEncodeAV1QIndexKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    VideoEncodeAV1QIndexKHR & operator=( VkVideoEncodeAV1QIndexKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      return *this = *reinterpret_cast<VideoEncodeAV1QIndexKHR const *>( &rhs );
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (fluent-interface style).
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1QIndexKHR & setIntraQIndex( uint32_t intraQIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      this->intraQIndex = intraQIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1QIndexKHR & setPredictiveQIndex( uint32_t predictiveQIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      this->predictiveQIndex = predictiveQIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1QIndexKHR & setBipredictiveQIndex( uint32_t bipredictiveQIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      this->bipredictiveQIndex = bipredictiveQIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type; valid because wrapper and native struct share one layout.
    operator VkVideoEncodeAV1QIndexKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeAV1QIndexKHR *>( this );
    }

    operator VkVideoEncodeAV1QIndexKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeAV1QIndexKHR *>( this );
    }

    operator VkVideoEncodeAV1QIndexKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeAV1QIndexKHR *>( this );
    }

    operator VkVideoEncodeAV1QIndexKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeAV1QIndexKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic tooling.
    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( intraQIndex, predictiveQIndex, bipredictiveQIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeAV1QIndexKHR const & ) const = default;
#else
    // Memberwise equality; via reflect() when reflection support is enabled.
    bool operator==( VideoEncodeAV1QIndexKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( intraQIndex == rhs.intraQIndex ) && ( predictiveQIndex == rhs.predictiveQIndex ) && ( bipredictiveQIndex == rhs.bipredictiveQIndex );
#  endif
    }

    bool operator!=( VideoEncodeAV1QIndexKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    uint32_t intraQIndex        = {};
    uint32_t predictiveQIndex   = {};
    uint32_t bipredictiveQIndex = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for template-based type lookups (C++20 and newer only).
  template <>
  struct CppType<VkVideoEncodeAV1QIndexKHR>
  {
    using Type = VideoEncodeAV1QIndexKHR;
  };
#endif

  // wrapper struct for struct VkVideoEncodeAV1QualityLevelPropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1QualityLevelPropertiesKHR.html
  // NOTE(review): no setters are generated for this struct and pNext is non-const, which suggests it
  // is an output structure filled by the implementation — confirm against the extension spec.
  struct VideoEncodeAV1QualityLevelPropertiesKHR
  {
    using NativeType = VkVideoEncodeAV1QualityLevelPropertiesKHR;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeAv1QualityLevelPropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all members default to zero / null.
    VULKAN_HPP_CONSTEXPR VideoEncodeAV1QualityLevelPropertiesKHR( VideoEncodeAV1RateControlFlagsKHR preferredRateControlFlags_                        = {},
                                                                  uint32_t                          preferredGopFrameCount_                           = {},
                                                                  uint32_t                          preferredKeyFramePeriod_                          = {},
                                                                  uint32_t                          preferredConsecutiveBipredictiveFrameCount_       = {},
                                                                  uint32_t                          preferredTemporalLayerCount_                      = {},
                                                                  VideoEncodeAV1QIndexKHR           preferredConstantQIndex_                          = {},
                                                                  uint32_t                          preferredMaxSingleReferenceCount_                 = {},
                                                                  uint32_t                          preferredSingleReferenceNameMask_                 = {},
                                                                  uint32_t                          preferredMaxUnidirectionalCompoundReferenceCount_ = {},
                                                                  uint32_t preferredMaxUnidirectionalCompoundGroup1ReferenceCount_                    = {},
                                                                  uint32_t preferredUnidirectionalCompoundReferenceNameMask_                          = {},
                                                                  uint32_t preferredMaxBidirectionalCompoundReferenceCount_                           = {},
                                                                  uint32_t preferredMaxBidirectionalCompoundGroup1ReferenceCount_                     = {},
                                                                  uint32_t preferredMaxBidirectionalCompoundGroup2ReferenceCount_                     = {},
                                                                  uint32_t preferredBidirectionalCompoundReferenceNameMask_                           = {},
                                                                  void *   pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , preferredRateControlFlags{ preferredRateControlFlags_ }
      , preferredGopFrameCount{ preferredGopFrameCount_ }
      , preferredKeyFramePeriod{ preferredKeyFramePeriod_ }
      , preferredConsecutiveBipredictiveFrameCount{ preferredConsecutiveBipredictiveFrameCount_ }
      , preferredTemporalLayerCount{ preferredTemporalLayerCount_ }
      , preferredConstantQIndex{ preferredConstantQIndex_ }
      , preferredMaxSingleReferenceCount{ preferredMaxSingleReferenceCount_ }
      , preferredSingleReferenceNameMask{ preferredSingleReferenceNameMask_ }
      , preferredMaxUnidirectionalCompoundReferenceCount{ preferredMaxUnidirectionalCompoundReferenceCount_ }
      , preferredMaxUnidirectionalCompoundGroup1ReferenceCount{ preferredMaxUnidirectionalCompoundGroup1ReferenceCount_ }
      , preferredUnidirectionalCompoundReferenceNameMask{ preferredUnidirectionalCompoundReferenceNameMask_ }
      , preferredMaxBidirectionalCompoundReferenceCount{ preferredMaxBidirectionalCompoundReferenceCount_ }
      , preferredMaxBidirectionalCompoundGroup1ReferenceCount{ preferredMaxBidirectionalCompoundGroup1ReferenceCount_ }
      , preferredMaxBidirectionalCompoundGroup2ReferenceCount{ preferredMaxBidirectionalCompoundGroup2ReferenceCount_ }
      , preferredBidirectionalCompoundReferenceNameMask{ preferredBidirectionalCompoundReferenceNameMask_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeAV1QualityLevelPropertiesKHR( VideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the wrapper mirrors its layout, so a reinterpreting copy is valid.
    VideoEncodeAV1QualityLevelPropertiesKHR( VkVideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeAV1QualityLevelPropertiesKHR( *reinterpret_cast<VideoEncodeAV1QualityLevelPropertiesKHR const *>( &rhs ) )
    {
    }

    VideoEncodeAV1QualityLevelPropertiesKHR & operator=( VideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    VideoEncodeAV1QualityLevelPropertiesKHR & operator=( VkVideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeAV1QualityLevelPropertiesKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type; valid because wrapper and native struct share one layout.
    operator VkVideoEncodeAV1QualityLevelPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeAV1QualityLevelPropertiesKHR *>( this );
    }

    operator VkVideoEncodeAV1QualityLevelPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeAV1QualityLevelPropertiesKHR *>( this );
    }

    operator VkVideoEncodeAV1QualityLevelPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeAV1QualityLevelPropertiesKHR *>( this );
    }

    operator VkVideoEncodeAV1QualityLevelPropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeAV1QualityLevelPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic tooling.
    std::tuple<StructureType const &,
               void * const &,
               VideoEncodeAV1RateControlFlagsKHR const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               VideoEncodeAV1QIndexKHR const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       preferredRateControlFlags,
                       preferredGopFrameCount,
                       preferredKeyFramePeriod,
                       preferredConsecutiveBipredictiveFrameCount,
                       preferredTemporalLayerCount,
                       preferredConstantQIndex,
                       preferredMaxSingleReferenceCount,
                       preferredSingleReferenceNameMask,
                       preferredMaxUnidirectionalCompoundReferenceCount,
                       preferredMaxUnidirectionalCompoundGroup1ReferenceCount,
                       preferredUnidirectionalCompoundReferenceNameMask,
                       preferredMaxBidirectionalCompoundReferenceCount,
                       preferredMaxBidirectionalCompoundGroup1ReferenceCount,
                       preferredMaxBidirectionalCompoundGroup2ReferenceCount,
                       preferredBidirectionalCompoundReferenceNameMask );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeAV1QualityLevelPropertiesKHR const & ) const = default;
#else
    // Memberwise equality; via reflect() when reflection support is enabled.
    bool operator==( VideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( preferredRateControlFlags == rhs.preferredRateControlFlags ) &&
             ( preferredGopFrameCount == rhs.preferredGopFrameCount ) && ( preferredKeyFramePeriod == rhs.preferredKeyFramePeriod ) &&
             ( preferredConsecutiveBipredictiveFrameCount == rhs.preferredConsecutiveBipredictiveFrameCount ) &&
             ( preferredTemporalLayerCount == rhs.preferredTemporalLayerCount ) && ( preferredConstantQIndex == rhs.preferredConstantQIndex ) &&
             ( preferredMaxSingleReferenceCount == rhs.preferredMaxSingleReferenceCount ) &&
             ( preferredSingleReferenceNameMask == rhs.preferredSingleReferenceNameMask ) &&
             ( preferredMaxUnidirectionalCompoundReferenceCount == rhs.preferredMaxUnidirectionalCompoundReferenceCount ) &&
             ( preferredMaxUnidirectionalCompoundGroup1ReferenceCount == rhs.preferredMaxUnidirectionalCompoundGroup1ReferenceCount ) &&
             ( preferredUnidirectionalCompoundReferenceNameMask == rhs.preferredUnidirectionalCompoundReferenceNameMask ) &&
             ( preferredMaxBidirectionalCompoundReferenceCount == rhs.preferredMaxBidirectionalCompoundReferenceCount ) &&
             ( preferredMaxBidirectionalCompoundGroup1ReferenceCount == rhs.preferredMaxBidirectionalCompoundGroup1ReferenceCount ) &&
             ( preferredMaxBidirectionalCompoundGroup2ReferenceCount == rhs.preferredMaxBidirectionalCompoundGroup2ReferenceCount ) &&
             ( preferredBidirectionalCompoundReferenceNameMask == rhs.preferredBidirectionalCompoundReferenceNameMask );
#  endif
    }

    bool operator!=( VideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                     sType                                                  = StructureType::eVideoEncodeAv1QualityLevelPropertiesKHR;
    void *                            pNext                                                  = {};
    VideoEncodeAV1RateControlFlagsKHR preferredRateControlFlags                              = {};
    uint32_t                          preferredGopFrameCount                                 = {};
    uint32_t                          preferredKeyFramePeriod                                = {};
    uint32_t                          preferredConsecutiveBipredictiveFrameCount             = {};
    uint32_t                          preferredTemporalLayerCount                            = {};
    VideoEncodeAV1QIndexKHR           preferredConstantQIndex                                = {};
    uint32_t                          preferredMaxSingleReferenceCount                       = {};
    uint32_t                          preferredSingleReferenceNameMask                       = {};
    uint32_t                          preferredMaxUnidirectionalCompoundReferenceCount       = {};
    uint32_t                          preferredMaxUnidirectionalCompoundGroup1ReferenceCount = {};
    uint32_t                          preferredUnidirectionalCompoundReferenceNameMask       = {};
    uint32_t                          preferredMaxBidirectionalCompoundReferenceCount        = {};
    uint32_t                          preferredMaxBidirectionalCompoundGroup1ReferenceCount  = {};
    uint32_t                          preferredMaxBidirectionalCompoundGroup2ReferenceCount  = {};
    uint32_t                          preferredBidirectionalCompoundReferenceNameMask        = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for template-based type lookups (C++20 and newer only).
  template <>
  struct CppType<VkVideoEncodeAV1QualityLevelPropertiesKHR>
  {
    using Type = VideoEncodeAV1QualityLevelPropertiesKHR;
  };
#endif

  // Maps StructureType::eVideoEncodeAv1QualityLevelPropertiesKHR to its corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeAv1QualityLevelPropertiesKHR>
  {
    using Type = VideoEncodeAV1QualityLevelPropertiesKHR;
  };

  // wrapper struct for struct VkVideoEncodeAV1QuantizationMapCapabilitiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1QuantizationMapCapabilitiesKHR.html
  // NOTE(review): no setters are generated and pNext is non-const, which suggests this is an output
  // (capabilities) structure filled by the implementation — confirm against the extension spec.
  struct VideoEncodeAV1QuantizationMapCapabilitiesKHR
  {
    using NativeType = VkVideoEncodeAV1QuantizationMapCapabilitiesKHR;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeAv1QuantizationMapCapabilitiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; both signed delta bounds default to zero.
    VULKAN_HPP_CONSTEXPR
      VideoEncodeAV1QuantizationMapCapabilitiesKHR( int32_t minQIndexDelta_ = {}, int32_t maxQIndexDelta_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , minQIndexDelta{ minQIndexDelta_ }
      , maxQIndexDelta{ maxQIndexDelta_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeAV1QuantizationMapCapabilitiesKHR( VideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the wrapper mirrors its layout, so a reinterpreting copy is valid.
    VideoEncodeAV1QuantizationMapCapabilitiesKHR( VkVideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeAV1QuantizationMapCapabilitiesKHR( *reinterpret_cast<VideoEncodeAV1QuantizationMapCapabilitiesKHR const *>( &rhs ) )
    {
    }

    VideoEncodeAV1QuantizationMapCapabilitiesKHR & operator=( VideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    VideoEncodeAV1QuantizationMapCapabilitiesKHR & operator=( VkVideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeAV1QuantizationMapCapabilitiesKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C type; valid because wrapper and native struct share one layout.
    operator VkVideoEncodeAV1QuantizationMapCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeAV1QuantizationMapCapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeAV1QuantizationMapCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeAV1QuantizationMapCapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeAV1QuantizationMapCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeAV1QuantizationMapCapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeAV1QuantizationMapCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeAV1QuantizationMapCapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic tooling.
    std::tuple<StructureType const &, void * const &, int32_t const &, int32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, minQIndexDelta, maxQIndexDelta );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeAV1QuantizationMapCapabilitiesKHR const & ) const = default;
#else
    // Memberwise equality; via reflect() when reflection support is enabled.
    bool operator==( VideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minQIndexDelta == rhs.minQIndexDelta ) && ( maxQIndexDelta == rhs.maxQIndexDelta );
#  endif
    }

    bool operator!=( VideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType          = StructureType::eVideoEncodeAv1QuantizationMapCapabilitiesKHR;
    void *        pNext          = {};
    int32_t       minQIndexDelta = {};
    int32_t       maxQIndexDelta = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type to its C++ wrapper for template-based type lookups (C++20 and newer only).
  template <>
  struct CppType<VkVideoEncodeAV1QuantizationMapCapabilitiesKHR>
  {
    using Type = VideoEncodeAV1QuantizationMapCapabilitiesKHR;
  };
#endif

  // Maps StructureType::eVideoEncodeAv1QuantizationMapCapabilitiesKHR to its corresponding wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeAv1QuantizationMapCapabilitiesKHR>
  {
    using Type = VideoEncodeAV1QuantizationMapCapabilitiesKHR;
  };

  // wrapper struct for struct VkVideoEncodeAV1RateControlInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1RateControlInfoKHR.html
  struct VideoEncodeAV1RateControlInfoKHR
  {
    using NativeType = VkVideoEncodeAV1RateControlInfoKHR;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeAv1RateControlInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all members default to zero / null.
    VULKAN_HPP_CONSTEXPR VideoEncodeAV1RateControlInfoKHR( VideoEncodeAV1RateControlFlagsKHR flags_                             = {},
                                                           uint32_t                          gopFrameCount_                     = {},
                                                           uint32_t                          keyFramePeriod_                    = {},
                                                           uint32_t                          consecutiveBipredictiveFrameCount_ = {},
                                                           uint32_t                          temporalLayerCount_                = {},
                                                           const void *                      pNext_                             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , gopFrameCount{ gopFrameCount_ }
      , keyFramePeriod{ keyFramePeriod_ }
      , consecutiveBipredictiveFrameCount{ consecutiveBipredictiveFrameCount_ }
      , temporalLayerCount{ temporalLayerCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeAV1RateControlInfoKHR( VideoEncodeAV1RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the wrapper mirrors its layout, so a reinterpreting copy is valid.
    VideoEncodeAV1RateControlInfoKHR( VkVideoEncodeAV1RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeAV1RateControlInfoKHR( *reinterpret_cast<VideoEncodeAV1RateControlInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeAV1RateControlInfoKHR & operator=( VideoEncodeAV1RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    VideoEncodeAV1RateControlInfoKHR & operator=( VkVideoEncodeAV1RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeAV1RateControlInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters (fluent-interface style).
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setFlags( VideoEncodeAV1RateControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT
    {
      gopFrameCount = gopFrameCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setKeyFramePeriod( uint32_t keyFramePeriod_ ) VULKAN_HPP_NOEXCEPT
    {
      keyFramePeriod = keyFramePeriod_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR &
      setConsecutiveBipredictiveFrameCount( uint32_t consecutiveBipredictiveFrameCount_ ) VULKAN_HPP_NOEXCEPT
    {
      consecutiveBipredictiveFrameCount = consecutiveBipredictiveFrameCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setTemporalLayerCount( uint32_t temporalLayerCount_ ) VULKAN_HPP_NOEXCEPT
    {
      temporalLayerCount = temporalLayerCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type; valid because wrapper and native struct share one layout.
    operator VkVideoEncodeAV1RateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeAV1RateControlInfoKHR *>( this );
    }

    operator VkVideoEncodeAV1RateControlInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeAV1RateControlInfoKHR *>( this );
    }

    operator VkVideoEncodeAV1RateControlInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeAV1RateControlInfoKHR *>( this );
    }

    operator VkVideoEncodeAV1RateControlInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeAV1RateControlInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic tooling.
    std::tuple<StructureType const &,
               const void * const &,
               VideoEncodeAV1RateControlFlagsKHR const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, gopFrameCount, keyFramePeriod, consecutiveBipredictiveFrameCount, temporalLayerCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeAV1RateControlInfoKHR const & ) const = default;
#else
    // Memberwise equality; via reflect() when reflection support is enabled.
    bool operator==( VideoEncodeAV1RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( gopFrameCount == rhs.gopFrameCount ) &&
             ( keyFramePeriod == rhs.keyFramePeriod ) && ( consecutiveBipredictiveFrameCount == rhs.consecutiveBipredictiveFrameCount ) &&
             ( temporalLayerCount == rhs.temporalLayerCount );
#  endif
    }

    bool operator!=( VideoEncodeAV1RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                     sType                             = StructureType::eVideoEncodeAv1RateControlInfoKHR;
    const void *                      pNext                             = {};
    VideoEncodeAV1RateControlFlagsKHR flags                             = {};
    uint32_t                          gopFrameCount                     = {};
    uint32_t                          keyFramePeriod                    = {};
    uint32_t                          consecutiveBipredictiveFrameCount = {};
    uint32_t                          temporalLayerCount                = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type for CppType-based lookups (C++20 and newer).
  template <>
  struct CppType<VkVideoEncodeAV1RateControlInfoKHR>
  {
    using Type = VideoEncodeAV1RateControlInfoKHR;
  };
#endif

  // Maps StructureType::eVideoEncodeAv1RateControlInfoKHR to its wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeAv1RateControlInfoKHR>
  {
    using Type = VideoEncodeAV1RateControlInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeAV1RateControlLayerInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1RateControlLayerInfoKHR.html
  struct VideoEncodeAV1RateControlLayerInfoKHR
  {
    using NativeType = VkVideoEncodeAV1RateControlLayerInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeAv1RateControlLayerInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: every field defaults to zero-initialized; sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR VideoEncodeAV1RateControlLayerInfoKHR( Bool32                     useMinQIndex_    = {},
                                                                VideoEncodeAV1QIndexKHR    minQIndex_       = {},
                                                                Bool32                     useMaxQIndex_    = {},
                                                                VideoEncodeAV1QIndexKHR    maxQIndex_       = {},
                                                                Bool32                     useMaxFrameSize_ = {},
                                                                VideoEncodeAV1FrameSizeKHR maxFrameSize_    = {},
                                                                const void *               pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , useMinQIndex{ useMinQIndex_ }
      , minQIndex{ minQIndex_ }
      , useMaxQIndex{ useMaxQIndex_ }
      , maxQIndex{ maxQIndex_ }
      , useMaxFrameSize{ useMaxFrameSize_ }
      , maxFrameSize{ maxFrameSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeAV1RateControlLayerInfoKHR( VideoEncodeAV1RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the wrapper is used as a layout-compatible view, so a reinterpret_cast suffices.
    VideoEncodeAV1RateControlLayerInfoKHR( VkVideoEncodeAV1RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeAV1RateControlLayerInfoKHR( *reinterpret_cast<VideoEncodeAV1RateControlLayerInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeAV1RateControlLayerInfoKHR & operator=( VideoEncodeAV1RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the same layout-compatible view.
    VideoEncodeAV1RateControlLayerInfoKHR & operator=( VkVideoEncodeAV1RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeAV1RateControlLayerInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each stores the value and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setUseMinQIndex( Bool32 useMinQIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      useMinQIndex = useMinQIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setMinQIndex( VideoEncodeAV1QIndexKHR const & minQIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      minQIndex = minQIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setUseMaxQIndex( Bool32 useMaxQIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      useMaxQIndex = useMaxQIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setMaxQIndex( VideoEncodeAV1QIndexKHR const & maxQIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      maxQIndex = maxQIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setUseMaxFrameSize( Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT
    {
      useMaxFrameSize = useMaxFrameSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setMaxFrameSize( VideoEncodeAV1FrameSizeKHR const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT
    {
      maxFrameSize = maxFrameSize_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (reference and pointer forms), again via reinterpret_cast.
    operator VkVideoEncodeAV1RateControlLayerInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeAV1RateControlLayerInfoKHR *>( this );
    }

    operator VkVideoEncodeAV1RateControlLayerInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeAV1RateControlLayerInfoKHR *>( this );
    }

    operator VkVideoEncodeAV1RateControlLayerInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeAV1RateControlLayerInfoKHR *>( this );
    }

    operator VkVideoEncodeAV1RateControlLayerInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeAV1RateControlLayerInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: const references to every member, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               Bool32 const &,
               VideoEncodeAV1QIndexKHR const &,
               Bool32 const &,
               VideoEncodeAV1QIndexKHR const &,
               Bool32 const &,
               VideoEncodeAV1FrameSizeKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, useMinQIndex, minQIndex, useMaxQIndex, maxQIndex, useMaxFrameSize, maxFrameSize );
    }
#endif

    // Member-wise comparison; note that pNext is compared as a pointer value, not deep-compared.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeAV1RateControlLayerInfoKHR const & ) const = default;
#else
    bool operator==( VideoEncodeAV1RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useMinQIndex == rhs.useMinQIndex ) && ( minQIndex == rhs.minQIndex ) &&
             ( useMaxQIndex == rhs.useMaxQIndex ) && ( maxQIndex == rhs.maxQIndex ) && ( useMaxFrameSize == rhs.useMaxFrameSize ) &&
             ( maxFrameSize == rhs.maxFrameSize );
#  endif
    }

    bool operator!=( VideoEncodeAV1RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkVideoEncodeAV1RateControlLayerInfoKHR; the reinterpret_casts above rely on this exact order and set of types.
    StructureType              sType           = StructureType::eVideoEncodeAv1RateControlLayerInfoKHR;
    const void *               pNext           = {};
    Bool32                     useMinQIndex    = {};
    VideoEncodeAV1QIndexKHR    minQIndex       = {};
    Bool32                     useMaxQIndex    = {};
    VideoEncodeAV1QIndexKHR    maxQIndex       = {};
    Bool32                     useMaxFrameSize = {};
    VideoEncodeAV1FrameSizeKHR maxFrameSize    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type for CppType-based lookups (C++20 and newer).
  template <>
  struct CppType<VkVideoEncodeAV1RateControlLayerInfoKHR>
  {
    using Type = VideoEncodeAV1RateControlLayerInfoKHR;
  };
#endif

  // Maps StructureType::eVideoEncodeAv1RateControlLayerInfoKHR to its wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeAv1RateControlLayerInfoKHR>
  {
    using Type = VideoEncodeAV1RateControlLayerInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeAV1SessionCreateInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1SessionCreateInfoKHR.html
  struct VideoEncodeAV1SessionCreateInfoKHR
  {
    using NativeType = VkVideoEncodeAV1SessionCreateInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeAv1SessionCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by its member initializer, all other fields default to zero/null.
    VULKAN_HPP_CONSTEXPR
      VideoEncodeAV1SessionCreateInfoKHR( Bool32 useMaxLevel_ = {}, StdVideoAV1Level maxLevel_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , useMaxLevel( useMaxLevel_ )
      , maxLevel( maxLevel_ )
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeAV1SessionCreateInfoKHR( VideoEncodeAV1SessionCreateInfoKHR const & ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the wrapper is a layout-compatible view of it.
    VideoEncodeAV1SessionCreateInfoKHR( VkVideoEncodeAV1SessionCreateInfoKHR const & native ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeAV1SessionCreateInfoKHR( *reinterpret_cast<VideoEncodeAV1SessionCreateInfoKHR const *>( &native ) )
    {
    }

    VideoEncodeAV1SessionCreateInfoKHR & operator=( VideoEncodeAV1SessionCreateInfoKHR const & ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper view.
    VideoEncodeAV1SessionCreateInfoKHR & operator=( VkVideoEncodeAV1SessionCreateInfoKHR const & native ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeAV1SessionCreateInfoKHR const *>( &native );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each stores the value and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionCreateInfoKHR & setUseMaxLevel( Bool32 useMaxLevel_ ) VULKAN_HPP_NOEXCEPT
    {
      this->useMaxLevel = useMaxLevel_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionCreateInfoKHR & setMaxLevel( StdVideoAV1Level maxLevel_ ) VULKAN_HPP_NOEXCEPT
    {
      this->maxLevel = maxLevel_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (reference and pointer forms).
    operator VkVideoEncodeAV1SessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeAV1SessionCreateInfoKHR const *>( this );
    }

    operator VkVideoEncodeAV1SessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeAV1SessionCreateInfoKHR *>( this );
    }

    operator VkVideoEncodeAV1SessionCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeAV1SessionCreateInfoKHR const *>( this );
    }

    operator VkVideoEncodeAV1SessionCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeAV1SessionCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: const references to every member in declaration order.
    std::tuple<StructureType const &, const void * const &, Bool32 const &, StdVideoAV1Level const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, useMaxLevel, maxLevel );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Member-wise ordering; maxLevel is an opaque video-std type and is therefore ordered by its raw bytes.
    std::strong_ordering operator<=>( VideoEncodeAV1SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto order = sType <=> rhs.sType; order != 0 )
        return order;
      if ( auto order = pNext <=> rhs.pNext; order != 0 )
        return order;
      if ( auto order = useMaxLevel <=> rhs.useMaxLevel; order != 0 )
        return order;
      int diff = memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) );
      if ( diff < 0 )
        return std::strong_ordering::less;
      if ( diff > 0 )
        return std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#endif

    // Member-wise equality; maxLevel is compared bytewise, pNext as a raw pointer value.
    bool operator==( VideoEncodeAV1SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useMaxLevel == rhs.useMaxLevel ) &&
             ( 0 == memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ) );
    }

    bool operator!=( VideoEncodeAV1SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }

  public:
    // Members mirror VkVideoEncodeAV1SessionCreateInfoKHR; order and types must stay in sync with the C struct.
    StructureType    sType       = StructureType::eVideoEncodeAv1SessionCreateInfoKHR;
    const void *     pNext       = {};
    Bool32           useMaxLevel = {};
    StdVideoAV1Level maxLevel    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type for CppType-based lookups (C++20 and newer).
  template <>
  struct CppType<VkVideoEncodeAV1SessionCreateInfoKHR>
  {
    using Type = VideoEncodeAV1SessionCreateInfoKHR;
  };
#endif

  // Maps StructureType::eVideoEncodeAv1SessionCreateInfoKHR to its wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeAv1SessionCreateInfoKHR>
  {
    using Type = VideoEncodeAV1SessionCreateInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeAV1SessionParametersCreateInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1SessionParametersCreateInfoKHR.html
  struct VideoEncodeAV1SessionParametersCreateInfoKHR
  {
    using NativeType = VkVideoEncodeAV1SessionParametersCreateInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeAv1SessionParametersCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: pointers and the count default to null/zero; sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR VideoEncodeAV1SessionParametersCreateInfoKHR( const StdVideoAV1SequenceHeader *           pStdSequenceHeader_     = {},
                                                                       const StdVideoEncodeAV1DecoderModelInfo *   pStdDecoderModelInfo_   = {},
                                                                       uint32_t                                    stdOperatingPointCount_ = {},
                                                                       const StdVideoEncodeAV1OperatingPointInfo * pStdOperatingPoints_    = {},
                                                                       const void *                                pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pStdSequenceHeader{ pStdSequenceHeader_ }
      , pStdDecoderModelInfo{ pStdDecoderModelInfo_ }
      , stdOperatingPointCount{ stdOperatingPointCount_ }
      , pStdOperatingPoints{ pStdOperatingPoints_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeAV1SessionParametersCreateInfoKHR( VideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the wrapper is used as a layout-compatible view, so a reinterpret_cast suffices.
    VideoEncodeAV1SessionParametersCreateInfoKHR( VkVideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeAV1SessionParametersCreateInfoKHR( *reinterpret_cast<VideoEncodeAV1SessionParametersCreateInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives stdOperatingPointCount from the ArrayProxy's size and stores its data pointer.
    // NOTE: the proxy is non-owning; the pointed-to operating points must outlive this struct's use.
    VideoEncodeAV1SessionParametersCreateInfoKHR( const StdVideoAV1SequenceHeader *                                          pStdSequenceHeader_,
                                                  const StdVideoEncodeAV1DecoderModelInfo *                                  pStdDecoderModelInfo_,
                                                  ArrayProxyNoTemporaries<const StdVideoEncodeAV1OperatingPointInfo> const & stdOperatingPoints_,
                                                  const void *                                                               pNext_ = nullptr )
      : pNext( pNext_ )
      , pStdSequenceHeader( pStdSequenceHeader_ )
      , pStdDecoderModelInfo( pStdDecoderModelInfo_ )
      , stdOperatingPointCount( static_cast<uint32_t>( stdOperatingPoints_.size() ) )
      , pStdOperatingPoints( stdOperatingPoints_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VideoEncodeAV1SessionParametersCreateInfoKHR & operator=( VideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the same layout-compatible view.
    VideoEncodeAV1SessionParametersCreateInfoKHR & operator=( VkVideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeAV1SessionParametersCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each stores the value and returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionParametersCreateInfoKHR &
      setPStdSequenceHeader( const StdVideoAV1SequenceHeader * pStdSequenceHeader_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdSequenceHeader = pStdSequenceHeader_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionParametersCreateInfoKHR &
      setPStdDecoderModelInfo( const StdVideoEncodeAV1DecoderModelInfo * pStdDecoderModelInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdDecoderModelInfo = pStdDecoderModelInfo_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionParametersCreateInfoKHR & setStdOperatingPointCount( uint32_t stdOperatingPointCount_ ) VULKAN_HPP_NOEXCEPT
    {
      stdOperatingPointCount = stdOperatingPointCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionParametersCreateInfoKHR &
      setPStdOperatingPoints( const StdVideoEncodeAV1OperatingPointInfo * pStdOperatingPoints_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdOperatingPoints = pStdOperatingPoints_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets both the count (from the proxy's size) and the data pointer in one call.
    VideoEncodeAV1SessionParametersCreateInfoKHR &
      setStdOperatingPoints( ArrayProxyNoTemporaries<const StdVideoEncodeAV1OperatingPointInfo> const & stdOperatingPoints_ ) VULKAN_HPP_NOEXCEPT
    {
      stdOperatingPointCount = static_cast<uint32_t>( stdOperatingPoints_.size() );
      pStdOperatingPoints    = stdOperatingPoints_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (reference and pointer forms), again via reinterpret_cast.
    operator VkVideoEncodeAV1SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeAV1SessionParametersCreateInfoKHR *>( this );
    }

    operator VkVideoEncodeAV1SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeAV1SessionParametersCreateInfoKHR *>( this );
    }

    operator VkVideoEncodeAV1SessionParametersCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeAV1SessionParametersCreateInfoKHR *>( this );
    }

    operator VkVideoEncodeAV1SessionParametersCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeAV1SessionParametersCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: const references to every member, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               const StdVideoAV1SequenceHeader * const &,
               const StdVideoEncodeAV1DecoderModelInfo * const &,
               uint32_t const &,
               const StdVideoEncodeAV1OperatingPointInfo * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pStdSequenceHeader, pStdDecoderModelInfo, stdOperatingPointCount, pStdOperatingPoints );
    }
#endif

    // Member-wise comparison; all pointer members are compared as raw pointer values, not deep-compared.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeAV1SessionParametersCreateInfoKHR const & ) const = default;
#else
    bool operator==( VideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdSequenceHeader == rhs.pStdSequenceHeader ) &&
             ( pStdDecoderModelInfo == rhs.pStdDecoderModelInfo ) && ( stdOperatingPointCount == rhs.stdOperatingPointCount ) &&
             ( pStdOperatingPoints == rhs.pStdOperatingPoints );
#  endif
    }

    bool operator!=( VideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkVideoEncodeAV1SessionParametersCreateInfoKHR; the reinterpret_casts above rely on this exact layout.
    StructureType                               sType                  = StructureType::eVideoEncodeAv1SessionParametersCreateInfoKHR;
    const void *                                pNext                  = {};
    const StdVideoAV1SequenceHeader *           pStdSequenceHeader     = {};
    const StdVideoEncodeAV1DecoderModelInfo *   pStdDecoderModelInfo   = {};
    uint32_t                                    stdOperatingPointCount = {};
    const StdVideoEncodeAV1OperatingPointInfo * pStdOperatingPoints    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type for CppType-based lookups (C++20 and newer).
  template <>
  struct CppType<VkVideoEncodeAV1SessionParametersCreateInfoKHR>
  {
    using Type = VideoEncodeAV1SessionParametersCreateInfoKHR;
  };
#endif

  // Maps StructureType::eVideoEncodeAv1SessionParametersCreateInfoKHR to its wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeAv1SessionParametersCreateInfoKHR>
  {
    using Type = VideoEncodeAV1SessionParametersCreateInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeCapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeCapabilitiesKHR.html
  struct VideoEncodeCapabilitiesKHR
  {
    using NativeType = VkVideoEncodeCapabilitiesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeCapabilitiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: all fields default to zero-initialized; sType is fixed by its member initializer.
    // Note the non-const pNext: this struct is filled in by the implementation, not built by the application.
    VULKAN_HPP_CONSTEXPR VideoEncodeCapabilitiesKHR( VideoEncodeCapabilityFlagsKHR      flags_                         = {},
                                                     VideoEncodeRateControlModeFlagsKHR rateControlModes_              = {},
                                                     uint32_t                           maxRateControlLayers_          = {},
                                                     uint64_t                           maxBitrate_                    = {},
                                                     uint32_t                           maxQualityLevels_              = {},
                                                     Extent2D                           encodeInputPictureGranularity_ = {},
                                                     VideoEncodeFeedbackFlagsKHR        supportedEncodeFeedbackFlags_  = {},
                                                     void *                             pNext_                         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , rateControlModes{ rateControlModes_ }
      , maxRateControlLayers{ maxRateControlLayers_ }
      , maxBitrate{ maxBitrate_ }
      , maxQualityLevels{ maxQualityLevels_ }
      , encodeInputPictureGranularity{ encodeInputPictureGranularity_ }
      , supportedEncodeFeedbackFlags{ supportedEncodeFeedbackFlags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeCapabilitiesKHR( VideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the wrapper is used as a layout-compatible view, so a reinterpret_cast suffices.
    VideoEncodeCapabilitiesKHR( VkVideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeCapabilitiesKHR( *reinterpret_cast<VideoEncodeCapabilitiesKHR const *>( &rhs ) )
    {
    }

    VideoEncodeCapabilitiesKHR & operator=( VideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the same layout-compatible view. No member setters are provided for this struct.
    VideoEncodeCapabilitiesKHR & operator=( VkVideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeCapabilitiesKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C struct (reference and pointer forms), again via reinterpret_cast.
    operator VkVideoEncodeCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeCapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeCapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeCapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeCapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: const references to every member, in declaration order.
    std::tuple<StructureType const &,
               void * const &,
               VideoEncodeCapabilityFlagsKHR const &,
               VideoEncodeRateControlModeFlagsKHR const &,
               uint32_t const &,
               uint64_t const &,
               uint32_t const &,
               Extent2D const &,
               VideoEncodeFeedbackFlagsKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       flags,
                       rateControlModes,
                       maxRateControlLayers,
                       maxBitrate,
                       maxQualityLevels,
                       encodeInputPictureGranularity,
                       supportedEncodeFeedbackFlags );
    }
#endif

    // Member-wise comparison; pNext is compared as a pointer value, not deep-compared.
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeCapabilitiesKHR const & ) const = default;
#else
    bool operator==( VideoEncodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( rateControlModes == rhs.rateControlModes ) &&
             ( maxRateControlLayers == rhs.maxRateControlLayers ) && ( maxBitrate == rhs.maxBitrate ) && ( maxQualityLevels == rhs.maxQualityLevels ) &&
             ( encodeInputPictureGranularity == rhs.encodeInputPictureGranularity ) && ( supportedEncodeFeedbackFlags == rhs.supportedEncodeFeedbackFlags );
#  endif
    }

    bool operator!=( VideoEncodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Members mirror VkVideoEncodeCapabilitiesKHR; the reinterpret_casts above rely on this exact order and set of types.
    StructureType                      sType                         = StructureType::eVideoEncodeCapabilitiesKHR;
    void *                             pNext                         = {};
    VideoEncodeCapabilityFlagsKHR      flags                         = {};
    VideoEncodeRateControlModeFlagsKHR rateControlModes              = {};
    uint32_t                           maxRateControlLayers          = {};
    uint64_t                           maxBitrate                    = {};
    uint32_t                           maxQualityLevels              = {};
    Extent2D                           encodeInputPictureGranularity = {};
    VideoEncodeFeedbackFlagsKHR        supportedEncodeFeedbackFlags  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type for CppType-based lookups (C++20 and newer).
  template <>
  struct CppType<VkVideoEncodeCapabilitiesKHR>
  {
    using Type = VideoEncodeCapabilitiesKHR;
  };
#endif

  // Maps StructureType::eVideoEncodeCapabilitiesKHR to its wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeCapabilitiesKHR>
  {
    using Type = VideoEncodeCapabilitiesKHR;
  };

  // wrapper struct for struct VkVideoEncodeH264CapabilitiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264CapabilitiesKHR.html
  struct VideoEncodeH264CapabilitiesKHR
  {
    using NativeType = VkVideoEncodeH264CapabilitiesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH264CapabilitiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesKHR( VideoEncodeH264CapabilityFlagsKHR flags_                            = {},
                                                         StdVideoH264LevelIdc              maxLevelIdc_                      = {},
                                                         uint32_t                          maxSliceCount_                    = {},
                                                         uint32_t                          maxPPictureL0ReferenceCount_      = {},
                                                         uint32_t                          maxBPictureL0ReferenceCount_      = {},
                                                         uint32_t                          maxL1ReferenceCount_              = {},
                                                         uint32_t                          maxTemporalLayerCount_            = {},
                                                         Bool32                            expectDyadicTemporalLayerPattern_ = {},
                                                         int32_t                           minQp_                            = {},
                                                         int32_t                           maxQp_                            = {},
                                                         Bool32                            prefersGopRemainingFrames_        = {},
                                                         Bool32                            requiresGopRemainingFrames_       = {},
                                                         VideoEncodeH264StdFlagsKHR        stdSyntaxFlags_                   = {},
                                                         void *                            pNext_                            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , maxLevelIdc{ maxLevelIdc_ }
      , maxSliceCount{ maxSliceCount_ }
      , maxPPictureL0ReferenceCount{ maxPPictureL0ReferenceCount_ }
      , maxBPictureL0ReferenceCount{ maxBPictureL0ReferenceCount_ }
      , maxL1ReferenceCount{ maxL1ReferenceCount_ }
      , maxTemporalLayerCount{ maxTemporalLayerCount_ }
      , expectDyadicTemporalLayerPattern{ expectDyadicTemporalLayerPattern_ }
      , minQp{ minQp_ }
      , maxQp{ maxQp_ }
      , prefersGopRemainingFrames{ prefersGopRemainingFrames_ }
      , requiresGopRemainingFrames{ requiresGopRemainingFrames_ }
      , stdSyntaxFlags{ stdSyntaxFlags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesKHR( VideoEncodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264CapabilitiesKHR( VkVideoEncodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264CapabilitiesKHR( *reinterpret_cast<VideoEncodeH264CapabilitiesKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH264CapabilitiesKHR & operator=( VideoEncodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    VideoEncodeH264CapabilitiesKHR & operator=( VkVideoEncodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH264CapabilitiesKHR const *>( &rhs );
      return *this;
    }

    operator VkVideoEncodeH264CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264CapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeH264CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264CapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeH264CapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH264CapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeH264CapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH264CapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &,
               void * const &,
               VideoEncodeH264CapabilityFlagsKHR const &,
               StdVideoH264LevelIdc const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               Bool32 const &,
               int32_t const &,
               int32_t const &,
               Bool32 const &,
               Bool32 const &,
               VideoEncodeH264StdFlagsKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       flags,
                       maxLevelIdc,
                       maxSliceCount,
                       maxPPictureL0ReferenceCount,
                       maxBPictureL0ReferenceCount,
                       maxL1ReferenceCount,
                       maxTemporalLayerCount,
                       expectDyadicTemporalLayerPattern,
                       minQp,
                       maxQp,
                       prefersGopRemainingFrames,
                       requiresGopRemainingFrames,
                       stdSyntaxFlags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    std::strong_ordering operator<=>( VideoEncodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
        return cmp;
      if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = maxSliceCount <=> rhs.maxSliceCount; cmp != 0 )
        return cmp;
      if ( auto cmp = maxPPictureL0ReferenceCount <=> rhs.maxPPictureL0ReferenceCount; cmp != 0 )
        return cmp;
      if ( auto cmp = maxBPictureL0ReferenceCount <=> rhs.maxBPictureL0ReferenceCount; cmp != 0 )
        return cmp;
      if ( auto cmp = maxL1ReferenceCount <=> rhs.maxL1ReferenceCount; cmp != 0 )
        return cmp;
      if ( auto cmp = maxTemporalLayerCount <=> rhs.maxTemporalLayerCount; cmp != 0 )
        return cmp;
      if ( auto cmp = expectDyadicTemporalLayerPattern <=> rhs.expectDyadicTemporalLayerPattern; cmp != 0 )
        return cmp;
      if ( auto cmp = minQp <=> rhs.minQp; cmp != 0 )
        return cmp;
      if ( auto cmp = maxQp <=> rhs.maxQp; cmp != 0 )
        return cmp;
      if ( auto cmp = prefersGopRemainingFrames <=> rhs.prefersGopRemainingFrames; cmp != 0 )
        return cmp;
      if ( auto cmp = requiresGopRemainingFrames <=> rhs.requiresGopRemainingFrames; cmp != 0 )
        return cmp;
      if ( auto cmp = stdSyntaxFlags <=> rhs.stdSyntaxFlags; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    bool operator==( VideoEncodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ) == 0 ) && ( maxSliceCount == rhs.maxSliceCount ) &&
             ( maxPPictureL0ReferenceCount == rhs.maxPPictureL0ReferenceCount ) && ( maxBPictureL0ReferenceCount == rhs.maxBPictureL0ReferenceCount ) &&
             ( maxL1ReferenceCount == rhs.maxL1ReferenceCount ) && ( maxTemporalLayerCount == rhs.maxTemporalLayerCount ) &&
             ( expectDyadicTemporalLayerPattern == rhs.expectDyadicTemporalLayerPattern ) && ( minQp == rhs.minQp ) && ( maxQp == rhs.maxQp ) &&
             ( prefersGopRemainingFrames == rhs.prefersGopRemainingFrames ) && ( requiresGopRemainingFrames == rhs.requiresGopRemainingFrames ) &&
             ( stdSyntaxFlags == rhs.stdSyntaxFlags );
    }

    bool operator!=( VideoEncodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    StructureType                     sType                            = StructureType::eVideoEncodeH264CapabilitiesKHR;
    void *                            pNext                            = {};
    VideoEncodeH264CapabilityFlagsKHR flags                            = {};
    StdVideoH264LevelIdc              maxLevelIdc                      = {};
    uint32_t                          maxSliceCount                    = {};
    uint32_t                          maxPPictureL0ReferenceCount      = {};
    uint32_t                          maxBPictureL0ReferenceCount      = {};
    uint32_t                          maxL1ReferenceCount              = {};
    uint32_t                          maxTemporalLayerCount            = {};
    Bool32                            expectDyadicTemporalLayerPattern = {};
    int32_t                           minQp                            = {};
    int32_t                           maxQp                            = {};
    Bool32                            prefersGopRemainingFrames        = {};
    Bool32                            requiresGopRemainingFrames       = {};
    VideoEncodeH264StdFlagsKHR        stdSyntaxFlags                   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type VkVideoEncodeH264CapabilitiesKHR to its C++ wrapper (only provided for C++20 and later).
  template <>
  struct CppType<VkVideoEncodeH264CapabilitiesKHR>
  {
    using Type = VideoEncodeH264CapabilitiesKHR;
  };
#endif

  // Trait mapping StructureType::eVideoEncodeH264CapabilitiesKHR back to the wrapper struct that carries that sType.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264CapabilitiesKHR>
  {
    using Type = VideoEncodeH264CapabilitiesKHR;
  };

  // wrapper struct for struct VkVideoEncodeH264DpbSlotInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264DpbSlotInfoKHR.html
  //
  // Assumed layout-compatible with the native struct: all conversions below are reinterpret_casts of `this`.
  struct VideoEncodeH264DpbSlotInfoKHR
  {
    using NativeType = VkVideoEncodeH264DpbSlotInfoKHR;

    // NOTE(review): presumably false means this sType must not occur more than once in a pNext chain — confirm against StructureChain handling.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH264DpbSlotInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all parameters are defaulted so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR VideoEncodeH264DpbSlotInfoKHR( const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo_ = {},
                                                        const void *                            pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , pStdReferenceInfo{ pStdReferenceInfo_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH264DpbSlotInfoKHR( VideoEncodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with VkVideoEncodeH264DpbSlotInfoKHR.
    VideoEncodeH264DpbSlotInfoKHR( VkVideoEncodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264DpbSlotInfoKHR( *reinterpret_cast<VideoEncodeH264DpbSlotInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH264DpbSlotInfoKHR & operator=( VideoEncodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (available even when wrapper constructors are disabled).
    VideoEncodeH264DpbSlotInfoKHR & operator=( VkVideoEncodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH264DpbSlotInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoKHR &
      setPStdReferenceInfo( const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdReferenceInfo = pStdReferenceInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (by const/non-const reference and pointer) for passing directly to the C API.
    operator VkVideoEncodeH264DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264DpbSlotInfoKHR *>( this );
    }

    operator VkVideoEncodeH264DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264DpbSlotInfoKHR *>( this );
    }

    operator VkVideoEncodeH264DpbSlotInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH264DpbSlotInfoKHR *>( this );
    }

    operator VkVideoEncodeH264DpbSlotInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH264DpbSlotInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members, in declaration order, as a tuple of references for generic code.
    std::tuple<StructureType const &, const void * const &, const StdVideoEncodeH264ReferenceInfo * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pStdReferenceInfo );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH264DpbSlotInfoKHR const & ) const = default;
#else
    // Member-wise equality; pointer members compare by address, not by pointee contents.
    bool operator==( VideoEncodeH264DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo );
#  endif
    }

    bool operator!=( VideoEncodeH264DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                           sType             = StructureType::eVideoEncodeH264DpbSlotInfoKHR;
    const void *                            pNext             = {};
    const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type VkVideoEncodeH264DpbSlotInfoKHR to its C++ wrapper (only provided for C++20 and later).
  template <>
  struct CppType<VkVideoEncodeH264DpbSlotInfoKHR>
  {
    using Type = VideoEncodeH264DpbSlotInfoKHR;
  };
#endif

  // Trait mapping StructureType::eVideoEncodeH264DpbSlotInfoKHR back to the wrapper struct that carries that sType.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264DpbSlotInfoKHR>
  {
    using Type = VideoEncodeH264DpbSlotInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeH264FrameSizeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264FrameSizeKHR.html
  //
  // Plain data struct (no sType/pNext), so it is not chainable and has no StructureType-based CppType specialization.
  // Assumed layout-compatible with the native struct: all conversions below are reinterpret_casts of `this`.
  struct VideoEncodeH264FrameSizeKHR
  {
    using NativeType = VkVideoEncodeH264FrameSizeKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all parameters are defaulted so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR VideoEncodeH264FrameSizeKHR( uint32_t frameISize_ = {}, uint32_t framePSize_ = {}, uint32_t frameBSize_ = {} ) VULKAN_HPP_NOEXCEPT
      : frameISize{ frameISize_ }
      , framePSize{ framePSize_ }
      , frameBSize{ frameBSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH264FrameSizeKHR( VideoEncodeH264FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with VkVideoEncodeH264FrameSizeKHR.
    VideoEncodeH264FrameSizeKHR( VkVideoEncodeH264FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264FrameSizeKHR( *reinterpret_cast<VideoEncodeH264FrameSizeKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH264FrameSizeKHR & operator=( VideoEncodeH264FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (available even when wrapper constructors are disabled).
    VideoEncodeH264FrameSizeKHR & operator=( VkVideoEncodeH264FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH264FrameSizeKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeKHR & setFrameISize( uint32_t frameISize_ ) VULKAN_HPP_NOEXCEPT
    {
      frameISize = frameISize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeKHR & setFramePSize( uint32_t framePSize_ ) VULKAN_HPP_NOEXCEPT
    {
      framePSize = framePSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeKHR & setFrameBSize( uint32_t frameBSize_ ) VULKAN_HPP_NOEXCEPT
    {
      frameBSize = frameBSize_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (by const/non-const reference and pointer) for passing directly to the C API.
    operator VkVideoEncodeH264FrameSizeKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264FrameSizeKHR *>( this );
    }

    operator VkVideoEncodeH264FrameSizeKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264FrameSizeKHR *>( this );
    }

    operator VkVideoEncodeH264FrameSizeKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH264FrameSizeKHR *>( this );
    }

    operator VkVideoEncodeH264FrameSizeKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH264FrameSizeKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members, in declaration order, as a tuple of references for generic code.
    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( frameISize, framePSize, frameBSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH264FrameSizeKHR const & ) const = default;
#else
    // Member-wise equality over the three frame-size fields.
    bool operator==( VideoEncodeH264FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( frameISize == rhs.frameISize ) && ( framePSize == rhs.framePSize ) && ( frameBSize == rhs.frameBSize );
#  endif
    }

    bool operator!=( VideoEncodeH264FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t frameISize = {};
    uint32_t framePSize = {};
    uint32_t frameBSize = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type VkVideoEncodeH264FrameSizeKHR to its C++ wrapper (only provided for C++20 and later).
  template <>
  struct CppType<VkVideoEncodeH264FrameSizeKHR>
  {
    using Type = VideoEncodeH264FrameSizeKHR;
  };
#endif

  // wrapper struct for struct VkVideoEncodeH264GopRemainingFrameInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264GopRemainingFrameInfoKHR.html
  //
  // Assumed layout-compatible with the native struct: all conversions below are reinterpret_casts of `this`.
  struct VideoEncodeH264GopRemainingFrameInfoKHR
  {
    using NativeType = VkVideoEncodeH264GopRemainingFrameInfoKHR;

    // NOTE(review): presumably false means this sType must not occur more than once in a pNext chain — confirm against StructureChain handling.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH264GopRemainingFrameInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all parameters are defaulted so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR VideoEncodeH264GopRemainingFrameInfoKHR( Bool32       useGopRemainingFrames_ = {},
                                                                  uint32_t     gopRemainingI_         = {},
                                                                  uint32_t     gopRemainingP_         = {},
                                                                  uint32_t     gopRemainingB_         = {},
                                                                  const void * pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , useGopRemainingFrames{ useGopRemainingFrames_ }
      , gopRemainingI{ gopRemainingI_ }
      , gopRemainingP{ gopRemainingP_ }
      , gopRemainingB{ gopRemainingB_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH264GopRemainingFrameInfoKHR( VideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with VkVideoEncodeH264GopRemainingFrameInfoKHR.
    VideoEncodeH264GopRemainingFrameInfoKHR( VkVideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264GopRemainingFrameInfoKHR( *reinterpret_cast<VideoEncodeH264GopRemainingFrameInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH264GopRemainingFrameInfoKHR & operator=( VideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (available even when wrapper constructors are disabled).
    VideoEncodeH264GopRemainingFrameInfoKHR & operator=( VkVideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH264GopRemainingFrameInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264GopRemainingFrameInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264GopRemainingFrameInfoKHR & setUseGopRemainingFrames( Bool32 useGopRemainingFrames_ ) VULKAN_HPP_NOEXCEPT
    {
      useGopRemainingFrames = useGopRemainingFrames_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264GopRemainingFrameInfoKHR & setGopRemainingI( uint32_t gopRemainingI_ ) VULKAN_HPP_NOEXCEPT
    {
      gopRemainingI = gopRemainingI_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264GopRemainingFrameInfoKHR & setGopRemainingP( uint32_t gopRemainingP_ ) VULKAN_HPP_NOEXCEPT
    {
      gopRemainingP = gopRemainingP_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264GopRemainingFrameInfoKHR & setGopRemainingB( uint32_t gopRemainingB_ ) VULKAN_HPP_NOEXCEPT
    {
      gopRemainingB = gopRemainingB_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (by const/non-const reference and pointer) for passing directly to the C API.
    operator VkVideoEncodeH264GopRemainingFrameInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264GopRemainingFrameInfoKHR *>( this );
    }

    operator VkVideoEncodeH264GopRemainingFrameInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264GopRemainingFrameInfoKHR *>( this );
    }

    operator VkVideoEncodeH264GopRemainingFrameInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH264GopRemainingFrameInfoKHR *>( this );
    }

    operator VkVideoEncodeH264GopRemainingFrameInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH264GopRemainingFrameInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members, in declaration order, as a tuple of references for generic code.
    std::tuple<StructureType const &, const void * const &, Bool32 const &, uint32_t const &, uint32_t const &, uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, useGopRemainingFrames, gopRemainingI, gopRemainingP, gopRemainingB );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH264GopRemainingFrameInfoKHR const & ) const = default;
#else
    // Member-wise equality; pNext compares by pointer address.
    bool operator==( VideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useGopRemainingFrames == rhs.useGopRemainingFrames ) &&
             ( gopRemainingI == rhs.gopRemainingI ) && ( gopRemainingP == rhs.gopRemainingP ) && ( gopRemainingB == rhs.gopRemainingB );
#  endif
    }

    bool operator!=( VideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                 = StructureType::eVideoEncodeH264GopRemainingFrameInfoKHR;
    const void *  pNext                 = {};
    Bool32        useGopRemainingFrames = {};
    uint32_t      gopRemainingI         = {};
    uint32_t      gopRemainingP         = {};
    uint32_t      gopRemainingB         = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type VkVideoEncodeH264GopRemainingFrameInfoKHR to its C++ wrapper (only provided for C++20 and later).
  template <>
  struct CppType<VkVideoEncodeH264GopRemainingFrameInfoKHR>
  {
    using Type = VideoEncodeH264GopRemainingFrameInfoKHR;
  };
#endif

  // Trait mapping StructureType::eVideoEncodeH264GopRemainingFrameInfoKHR back to the wrapper struct that carries that sType.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264GopRemainingFrameInfoKHR>
  {
    using Type = VideoEncodeH264GopRemainingFrameInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeH264NaluSliceInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264NaluSliceInfoKHR.html
  //
  // Assumed layout-compatible with the native struct: all conversions below are reinterpret_casts of `this`.
  struct VideoEncodeH264NaluSliceInfoKHR
  {
    using NativeType = VkVideoEncodeH264NaluSliceInfoKHR;

    // NOTE(review): presumably false means this sType must not occur more than once in a pNext chain — confirm against StructureChain handling.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH264NaluSliceInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all parameters are defaulted so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceInfoKHR( int32_t                               constantQp_      = {},
                                                          const StdVideoEncodeH264SliceHeader * pStdSliceHeader_ = {},
                                                          const void *                          pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , constantQp{ constantQp_ }
      , pStdSliceHeader{ pStdSliceHeader_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceInfoKHR( VideoEncodeH264NaluSliceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with VkVideoEncodeH264NaluSliceInfoKHR.
    VideoEncodeH264NaluSliceInfoKHR( VkVideoEncodeH264NaluSliceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264NaluSliceInfoKHR( *reinterpret_cast<VideoEncodeH264NaluSliceInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH264NaluSliceInfoKHR & operator=( VideoEncodeH264NaluSliceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (available even when wrapper constructors are disabled).
    VideoEncodeH264NaluSliceInfoKHR & operator=( VkVideoEncodeH264NaluSliceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH264NaluSliceInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoKHR & setConstantQp( int32_t constantQp_ ) VULKAN_HPP_NOEXCEPT
    {
      constantQp = constantQp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoKHR & setPStdSliceHeader( const StdVideoEncodeH264SliceHeader * pStdSliceHeader_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdSliceHeader = pStdSliceHeader_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (by const/non-const reference and pointer) for passing directly to the C API.
    operator VkVideoEncodeH264NaluSliceInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264NaluSliceInfoKHR *>( this );
    }

    operator VkVideoEncodeH264NaluSliceInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264NaluSliceInfoKHR *>( this );
    }

    operator VkVideoEncodeH264NaluSliceInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH264NaluSliceInfoKHR *>( this );
    }

    operator VkVideoEncodeH264NaluSliceInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH264NaluSliceInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members, in declaration order, as a tuple of references for generic code.
    std::tuple<StructureType const &, const void * const &, int32_t const &, const StdVideoEncodeH264SliceHeader * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, constantQp, pStdSliceHeader );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH264NaluSliceInfoKHR const & ) const = default;
#else
    // Member-wise equality; pointer members compare by address, not by pointee contents.
    bool operator==( VideoEncodeH264NaluSliceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( constantQp == rhs.constantQp ) && ( pStdSliceHeader == rhs.pStdSliceHeader );
#  endif
    }

    bool operator!=( VideoEncodeH264NaluSliceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                         sType           = StructureType::eVideoEncodeH264NaluSliceInfoKHR;
    const void *                          pNext           = {};
    int32_t                               constantQp      = {};
    const StdVideoEncodeH264SliceHeader * pStdSliceHeader = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type VkVideoEncodeH264NaluSliceInfoKHR to its C++ wrapper (only provided for C++20 and later).
  template <>
  struct CppType<VkVideoEncodeH264NaluSliceInfoKHR>
  {
    using Type = VideoEncodeH264NaluSliceInfoKHR;
  };
#endif

  // Trait mapping StructureType::eVideoEncodeH264NaluSliceInfoKHR back to the wrapper struct that carries that sType.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264NaluSliceInfoKHR>
  {
    using Type = VideoEncodeH264NaluSliceInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeH264PictureInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264PictureInfoKHR.html
  //
  // Assumed layout-compatible with the native struct: all conversions below are reinterpret_casts of `this`.
  // Non-owning: pNaluSliceEntries / pStdPictureInfo are raw pointers; the caller keeps the pointed-to data alive.
  struct VideoEncodeH264PictureInfoKHR
  {
    using NativeType = VkVideoEncodeH264PictureInfoKHR;

    // NOTE(review): presumably false means this sType must not occur more than once in a pNext chain — confirm against StructureChain handling.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH264PictureInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; all parameters are defaulted so this also serves as the default constructor.
    VULKAN_HPP_CONSTEXPR VideoEncodeH264PictureInfoKHR( uint32_t                                naluSliceEntryCount_ = {},
                                                        const VideoEncodeH264NaluSliceInfoKHR * pNaluSliceEntries_   = {},
                                                        const StdVideoEncodeH264PictureInfo *   pStdPictureInfo_     = {},
                                                        Bool32                                  generatePrefixNalu_  = {},
                                                        const void *                            pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , naluSliceEntryCount{ naluSliceEntryCount_ }
      , pNaluSliceEntries{ pNaluSliceEntries_ }
      , pStdPictureInfo{ pStdPictureInfo_ }
      , generatePrefixNalu{ generatePrefixNalu_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH264PictureInfoKHR( VideoEncodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on the wrapper being layout-compatible with VkVideoEncodeH264PictureInfoKHR.
    VideoEncodeH264PictureInfoKHR( VkVideoEncodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264PictureInfoKHR( *reinterpret_cast<VideoEncodeH264PictureInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives naluSliceEntryCount/pNaluSliceEntries from an array view.
    // The view is non-owning; the underlying slice array must outlive this struct's use.
    VideoEncodeH264PictureInfoKHR( ArrayProxyNoTemporaries<const VideoEncodeH264NaluSliceInfoKHR> const & naluSliceEntries_,
                                   const StdVideoEncodeH264PictureInfo *                                  pStdPictureInfo_    = {},
                                   Bool32                                                                 generatePrefixNalu_ = {},
                                   const void *                                                           pNext_              = nullptr )
      : pNext( pNext_ )
      , naluSliceEntryCount( static_cast<uint32_t>( naluSliceEntries_.size() ) )
      , pNaluSliceEntries( naluSliceEntries_.data() )
      , pStdPictureInfo( pStdPictureInfo_ )
      , generatePrefixNalu( generatePrefixNalu_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VideoEncodeH264PictureInfoKHR & operator=( VideoEncodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (available even when wrapper constructors are disabled).
    VideoEncodeH264PictureInfoKHR & operator=( VkVideoEncodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH264PictureInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264PictureInfoKHR & setNaluSliceEntryCount( uint32_t naluSliceEntryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      naluSliceEntryCount = naluSliceEntryCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264PictureInfoKHR &
      setPNaluSliceEntries( const VideoEncodeH264NaluSliceInfoKHR * pNaluSliceEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      pNaluSliceEntries = pNaluSliceEntries_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both count and pointer from an array view (keeps the pair consistent).
    VideoEncodeH264PictureInfoKHR &
      setNaluSliceEntries( ArrayProxyNoTemporaries<const VideoEncodeH264NaluSliceInfoKHR> const & naluSliceEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      naluSliceEntryCount = static_cast<uint32_t>( naluSliceEntries_.size() );
      pNaluSliceEntries   = naluSliceEntries_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264PictureInfoKHR & setPStdPictureInfo( const StdVideoEncodeH264PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdPictureInfo = pStdPictureInfo_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264PictureInfoKHR & setGeneratePrefixNalu( Bool32 generatePrefixNalu_ ) VULKAN_HPP_NOEXCEPT
    {
      generatePrefixNalu = generatePrefixNalu_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (by const/non-const reference and pointer) for passing directly to the C API.
    operator VkVideoEncodeH264PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264PictureInfoKHR *>( this );
    }

    operator VkVideoEncodeH264PictureInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264PictureInfoKHR *>( this );
    }

    operator VkVideoEncodeH264PictureInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH264PictureInfoKHR *>( this );
    }

    operator VkVideoEncodeH264PictureInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH264PictureInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members, in declaration order, as a tuple of references for generic code.
    std::tuple<StructureType const &,
               const void * const &,
               uint32_t const &,
               const VideoEncodeH264NaluSliceInfoKHR * const &,
               const StdVideoEncodeH264PictureInfo * const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, naluSliceEntryCount, pNaluSliceEntries, pStdPictureInfo, generatePrefixNalu );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH264PictureInfoKHR const & ) const = default;
#else
    // Member-wise equality; pointer members compare by address, not by pointee contents.
    bool operator==( VideoEncodeH264PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( naluSliceEntryCount == rhs.naluSliceEntryCount ) &&
             ( pNaluSliceEntries == rhs.pNaluSliceEntries ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && ( generatePrefixNalu == rhs.generatePrefixNalu );
#  endif
    }

    bool operator!=( VideoEncodeH264PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                           sType               = StructureType::eVideoEncodeH264PictureInfoKHR;
    const void *                            pNext               = {};
    uint32_t                                naluSliceEntryCount = {};
    const VideoEncodeH264NaluSliceInfoKHR * pNaluSliceEntries   = {};
    const StdVideoEncodeH264PictureInfo *   pStdPictureInfo     = {};
    Bool32                                  generatePrefixNalu  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type VkVideoEncodeH264PictureInfoKHR to its C++ wrapper (only provided for C++20 and later).
  template <>
  struct CppType<VkVideoEncodeH264PictureInfoKHR>
  {
    using Type = VideoEncodeH264PictureInfoKHR;
  };
#endif

  // Trait mapping StructureType::eVideoEncodeH264PictureInfoKHR back to the wrapper struct that carries that sType.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264PictureInfoKHR>
  {
    using Type = VideoEncodeH264PictureInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeH264ProfileInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264ProfileInfoKHR.html
  struct VideoEncodeH264ProfileInfoKHR
  {
    // The equivalent native (C API) type; the wrapper is converted to/from it via reinterpret_cast,
    // so both types must stay layout-identical.
    using NativeType = VkVideoEncodeH264ProfileInfoKHR;

    // Presumably consumed by StructureChain validation to reject duplicate entries in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The sType value identifying this structure in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH264ProfileInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by its member initializer; every other field is settable.
    VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileInfoKHR( StdVideoH264ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stdProfileIdc{ stdProfileIdc_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileInfoKHR( VideoEncodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the two types share the same layout.
    VideoEncodeH264ProfileInfoKHR( VkVideoEncodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264ProfileInfoKHR( *reinterpret_cast<VideoEncodeH264ProfileInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH264ProfileInfoKHR & operator=( VideoEncodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct through the layout-compatible reinterpret_cast.
    VideoEncodeH264ProfileInfoKHR & operator=( VkVideoEncodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH264ProfileInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileInfoKHR & setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
    {
      stdProfileIdc = stdProfileIdc_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (by reference and by pointer), so the wrapper can be
    // passed directly to the C API.
    operator VkVideoEncodeH264ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264ProfileInfoKHR *>( this );
    }

    operator VkVideoEncodeH264ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264ProfileInfoKHR *>( this );
    }

    operator VkVideoEncodeH264ProfileInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH264ProfileInfoKHR *>( this );
    }

    operator VkVideoEncodeH264ProfileInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH264ProfileInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, StdVideoH264ProfileIdc const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stdProfileIdc );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Member-wise ordering; stdProfileIdc is an external video-std type with no comparison operators
    // visible here, so it is compared bytewise with memcmp.
    std::strong_ordering operator<=>( VideoEncodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#endif

    // Member-wise equality; the external video-std member is compared bytewise.
    bool operator==( VideoEncodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 );
    }

    bool operator!=( VideoEncodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Member order mirrors VkVideoEncodeH264ProfileInfoKHR exactly; do not reorder.
    StructureType          sType         = StructureType::eVideoEncodeH264ProfileInfoKHR;
    const void *           pNext         = {};
    StdVideoH264ProfileIdc stdProfileIdc = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and later: maps the native C type to its C++ wrapper type.
  template <>
  struct CppType<VkVideoEncodeH264ProfileInfoKHR>
  {
    using Type = VideoEncodeH264ProfileInfoKHR;
  };
#endif

  // Maps the eVideoEncodeH264ProfileInfoKHR sType value to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264ProfileInfoKHR>
  {
    using Type = VideoEncodeH264ProfileInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeH264QpKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264QpKHR.html
  struct VideoEncodeH264QpKHR
  {
    // The equivalent native (C API) type; the wrapper is converted to/from it via reinterpret_cast,
    // so both types must stay layout-identical. This struct has no sType/pNext header.
    using NativeType = VkVideoEncodeH264QpKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; qpI/qpP/qpB are QP values per picture type (see the spec link above).
    VULKAN_HPP_CONSTEXPR VideoEncodeH264QpKHR( int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {} ) VULKAN_HPP_NOEXCEPT
      : qpI{ qpI_ }
      , qpP{ qpP_ }
      , qpB{ qpB_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH264QpKHR( VideoEncodeH264QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the two types share the same layout.
    VideoEncodeH264QpKHR( VkVideoEncodeH264QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264QpKHR( *reinterpret_cast<VideoEncodeH264QpKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH264QpKHR & operator=( VideoEncodeH264QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct through the layout-compatible reinterpret_cast.
    VideoEncodeH264QpKHR & operator=( VkVideoEncodeH264QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH264QpKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpKHR & setQpI( int32_t qpI_ ) VULKAN_HPP_NOEXCEPT
    {
      qpI = qpI_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpKHR & setQpP( int32_t qpP_ ) VULKAN_HPP_NOEXCEPT
    {
      qpP = qpP_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpKHR & setQpB( int32_t qpB_ ) VULKAN_HPP_NOEXCEPT
    {
      qpB = qpB_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (by reference and by pointer), so the wrapper can be
    // passed directly to the C API.
    operator VkVideoEncodeH264QpKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264QpKHR *>( this );
    }

    operator VkVideoEncodeH264QpKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264QpKHR *>( this );
    }

    operator VkVideoEncodeH264QpKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH264QpKHR *>( this );
    }

    operator VkVideoEncodeH264QpKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH264QpKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order.
    std::tuple<int32_t const &, int32_t const &, int32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( qpI, qpP, qpB );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // All members are int32_t, so the defaulted spaceship operator is sufficient.
    auto operator<=>( VideoEncodeH264QpKHR const & ) const = default;
#else
    bool operator==( VideoEncodeH264QpKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( qpI == rhs.qpI ) && ( qpP == rhs.qpP ) && ( qpB == rhs.qpB );
#  endif
    }

    bool operator!=( VideoEncodeH264QpKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkVideoEncodeH264QpKHR exactly; do not reorder.
    int32_t qpI = {};
    int32_t qpP = {};
    int32_t qpB = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and later: maps the native C type to its C++ wrapper type.
  // No StructureType specialization here, since this struct has no sType member.
  template <>
  struct CppType<VkVideoEncodeH264QpKHR>
  {
    using Type = VideoEncodeH264QpKHR;
  };
#endif

  // wrapper struct for struct VkVideoEncodeH264QualityLevelPropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264QualityLevelPropertiesKHR.html
  struct VideoEncodeH264QualityLevelPropertiesKHR
  {
    // The equivalent native (C API) type; the wrapper is converted to/from it via reinterpret_cast,
    // so both types must stay layout-identical.
    using NativeType = VkVideoEncodeH264QualityLevelPropertiesKHR;

    // Presumably consumed by StructureChain validation to reject duplicate entries in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The sType value identifying this structure in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH264QualityLevelPropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by its member initializer; every other field is settable.
    // Note the non-const pNext: this is a properties (output) structure filled in by the implementation.
    VULKAN_HPP_CONSTEXPR VideoEncodeH264QualityLevelPropertiesKHR( VideoEncodeH264RateControlFlagsKHR preferredRateControlFlags_         = {},
                                                                   uint32_t                           preferredGopFrameCount_            = {},
                                                                   uint32_t                           preferredIdrPeriod_                = {},
                                                                   uint32_t                           preferredConsecutiveBFrameCount_   = {},
                                                                   uint32_t                           preferredTemporalLayerCount_       = {},
                                                                   VideoEncodeH264QpKHR               preferredConstantQp_               = {},
                                                                   uint32_t                           preferredMaxL0ReferenceCount_      = {},
                                                                   uint32_t                           preferredMaxL1ReferenceCount_      = {},
                                                                   Bool32                             preferredStdEntropyCodingModeFlag_ = {},
                                                                   void *                             pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , preferredRateControlFlags{ preferredRateControlFlags_ }
      , preferredGopFrameCount{ preferredGopFrameCount_ }
      , preferredIdrPeriod{ preferredIdrPeriod_ }
      , preferredConsecutiveBFrameCount{ preferredConsecutiveBFrameCount_ }
      , preferredTemporalLayerCount{ preferredTemporalLayerCount_ }
      , preferredConstantQp{ preferredConstantQp_ }
      , preferredMaxL0ReferenceCount{ preferredMaxL0ReferenceCount_ }
      , preferredMaxL1ReferenceCount{ preferredMaxL1ReferenceCount_ }
      , preferredStdEntropyCodingModeFlag{ preferredStdEntropyCodingModeFlag_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH264QualityLevelPropertiesKHR( VideoEncodeH264QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the two types share the same layout.
    VideoEncodeH264QualityLevelPropertiesKHR( VkVideoEncodeH264QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264QualityLevelPropertiesKHR( *reinterpret_cast<VideoEncodeH264QualityLevelPropertiesKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH264QualityLevelPropertiesKHR & operator=( VideoEncodeH264QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct through the layout-compatible reinterpret_cast.
    VideoEncodeH264QualityLevelPropertiesKHR & operator=( VkVideoEncodeH264QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH264QualityLevelPropertiesKHR const *>( &rhs );
      return *this;
    }

    // No setters are generated for this struct (it is filled in by the implementation, not the app).
    // Implicit conversions to the native type (by reference and by pointer), so the wrapper can be
    // passed directly to the C API.
    operator VkVideoEncodeH264QualityLevelPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264QualityLevelPropertiesKHR *>( this );
    }

    operator VkVideoEncodeH264QualityLevelPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264QualityLevelPropertiesKHR *>( this );
    }

    operator VkVideoEncodeH264QualityLevelPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH264QualityLevelPropertiesKHR *>( this );
    }

    operator VkVideoEncodeH264QualityLevelPropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH264QualityLevelPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order.
    std::tuple<StructureType const &,
               void * const &,
               VideoEncodeH264RateControlFlagsKHR const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               VideoEncodeH264QpKHR const &,
               uint32_t const &,
               uint32_t const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       preferredRateControlFlags,
                       preferredGopFrameCount,
                       preferredIdrPeriod,
                       preferredConsecutiveBFrameCount,
                       preferredTemporalLayerCount,
                       preferredConstantQp,
                       preferredMaxL0ReferenceCount,
                       preferredMaxL1ReferenceCount,
                       preferredStdEntropyCodingModeFlag );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // All members have usable comparison operators, so the defaulted spaceship operator suffices.
    auto operator<=>( VideoEncodeH264QualityLevelPropertiesKHR const & ) const = default;
#else
    bool operator==( VideoEncodeH264QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( preferredRateControlFlags == rhs.preferredRateControlFlags ) &&
             ( preferredGopFrameCount == rhs.preferredGopFrameCount ) && ( preferredIdrPeriod == rhs.preferredIdrPeriod ) &&
             ( preferredConsecutiveBFrameCount == rhs.preferredConsecutiveBFrameCount ) && ( preferredTemporalLayerCount == rhs.preferredTemporalLayerCount ) &&
             ( preferredConstantQp == rhs.preferredConstantQp ) && ( preferredMaxL0ReferenceCount == rhs.preferredMaxL0ReferenceCount ) &&
             ( preferredMaxL1ReferenceCount == rhs.preferredMaxL1ReferenceCount ) &&
             ( preferredStdEntropyCodingModeFlag == rhs.preferredStdEntropyCodingModeFlag );
#  endif
    }

    bool operator!=( VideoEncodeH264QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkVideoEncodeH264QualityLevelPropertiesKHR exactly; do not reorder.
    StructureType                      sType                             = StructureType::eVideoEncodeH264QualityLevelPropertiesKHR;
    void *                             pNext                             = {};
    VideoEncodeH264RateControlFlagsKHR preferredRateControlFlags         = {};
    uint32_t                           preferredGopFrameCount            = {};
    uint32_t                           preferredIdrPeriod                = {};
    uint32_t                           preferredConsecutiveBFrameCount   = {};
    uint32_t                           preferredTemporalLayerCount       = {};
    VideoEncodeH264QpKHR               preferredConstantQp               = {};
    uint32_t                           preferredMaxL0ReferenceCount      = {};
    uint32_t                           preferredMaxL1ReferenceCount      = {};
    Bool32                             preferredStdEntropyCodingModeFlag = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and later: maps the native C type to its C++ wrapper type.
  template <>
  struct CppType<VkVideoEncodeH264QualityLevelPropertiesKHR>
  {
    using Type = VideoEncodeH264QualityLevelPropertiesKHR;
  };
#endif

  // Maps the eVideoEncodeH264QualityLevelPropertiesKHR sType value to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264QualityLevelPropertiesKHR>
  {
    using Type = VideoEncodeH264QualityLevelPropertiesKHR;
  };

  // wrapper struct for struct VkVideoEncodeH264QuantizationMapCapabilitiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264QuantizationMapCapabilitiesKHR.html
  struct VideoEncodeH264QuantizationMapCapabilitiesKHR
  {
    // The equivalent native (C API) type; the wrapper is converted to/from it via reinterpret_cast,
    // so both types must stay layout-identical.
    using NativeType = VkVideoEncodeH264QuantizationMapCapabilitiesKHR;

    // Presumably consumed by StructureChain validation to reject duplicate entries in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The sType value identifying this structure in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH264QuantizationMapCapabilitiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by its member initializer; every other field is settable.
    // Note the non-const pNext: this is a capabilities (output) structure filled in by the implementation.
    VULKAN_HPP_CONSTEXPR
      VideoEncodeH264QuantizationMapCapabilitiesKHR( int32_t minQpDelta_ = {}, int32_t maxQpDelta_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , minQpDelta{ minQpDelta_ }
      , maxQpDelta{ maxQpDelta_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      VideoEncodeH264QuantizationMapCapabilitiesKHR( VideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the two types share the same layout.
    VideoEncodeH264QuantizationMapCapabilitiesKHR( VkVideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264QuantizationMapCapabilitiesKHR( *reinterpret_cast<VideoEncodeH264QuantizationMapCapabilitiesKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH264QuantizationMapCapabilitiesKHR & operator=( VideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct through the layout-compatible reinterpret_cast.
    VideoEncodeH264QuantizationMapCapabilitiesKHR & operator=( VkVideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH264QuantizationMapCapabilitiesKHR const *>( &rhs );
      return *this;
    }

    // No setters are generated for this struct (it is filled in by the implementation, not the app).
    // Implicit conversions to the native type (by reference and by pointer), so the wrapper can be
    // passed directly to the C API.
    operator VkVideoEncodeH264QuantizationMapCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264QuantizationMapCapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeH264QuantizationMapCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264QuantizationMapCapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeH264QuantizationMapCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH264QuantizationMapCapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeH264QuantizationMapCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH264QuantizationMapCapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order.
    std::tuple<StructureType const &, void * const &, int32_t const &, int32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, minQpDelta, maxQpDelta );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // All members have usable comparison operators, so the defaulted spaceship operator suffices.
    auto operator<=>( VideoEncodeH264QuantizationMapCapabilitiesKHR const & ) const = default;
#else
    bool operator==( VideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minQpDelta == rhs.minQpDelta ) && ( maxQpDelta == rhs.maxQpDelta );
#  endif
    }

    bool operator!=( VideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkVideoEncodeH264QuantizationMapCapabilitiesKHR exactly; do not reorder.
    StructureType sType      = StructureType::eVideoEncodeH264QuantizationMapCapabilitiesKHR;
    void *        pNext      = {};
    int32_t       minQpDelta = {};
    int32_t       maxQpDelta = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and later: maps the native C type to its C++ wrapper type.
  template <>
  struct CppType<VkVideoEncodeH264QuantizationMapCapabilitiesKHR>
  {
    using Type = VideoEncodeH264QuantizationMapCapabilitiesKHR;
  };
#endif

  // Maps the eVideoEncodeH264QuantizationMapCapabilitiesKHR sType value to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264QuantizationMapCapabilitiesKHR>
  {
    using Type = VideoEncodeH264QuantizationMapCapabilitiesKHR;
  };

  // wrapper struct for struct VkVideoEncodeH264RateControlInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264RateControlInfoKHR.html
  struct VideoEncodeH264RateControlInfoKHR
  {
    // The equivalent native (C API) type; the wrapper is converted to/from it via reinterpret_cast,
    // so both types must stay layout-identical.
    using NativeType = VkVideoEncodeH264RateControlInfoKHR;

    // Presumably consumed by StructureChain validation to reject duplicate entries in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The sType value identifying this structure in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH264RateControlInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by its member initializer; every other field is settable.
    VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlInfoKHR( VideoEncodeH264RateControlFlagsKHR flags_                  = {},
                                                            uint32_t                           gopFrameCount_          = {},
                                                            uint32_t                           idrPeriod_              = {},
                                                            uint32_t                           consecutiveBFrameCount_ = {},
                                                            uint32_t                           temporalLayerCount_     = {},
                                                            const void *                       pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , gopFrameCount{ gopFrameCount_ }
      , idrPeriod{ idrPeriod_ }
      , consecutiveBFrameCount{ consecutiveBFrameCount_ }
      , temporalLayerCount{ temporalLayerCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlInfoKHR( VideoEncodeH264RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the two types share the same layout.
    VideoEncodeH264RateControlInfoKHR( VkVideoEncodeH264RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264RateControlInfoKHR( *reinterpret_cast<VideoEncodeH264RateControlInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH264RateControlInfoKHR & operator=( VideoEncodeH264RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct through the layout-compatible reinterpret_cast.
    VideoEncodeH264RateControlInfoKHR & operator=( VkVideoEncodeH264RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH264RateControlInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setFlags( VideoEncodeH264RateControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT
    {
      gopFrameCount = gopFrameCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setIdrPeriod( uint32_t idrPeriod_ ) VULKAN_HPP_NOEXCEPT
    {
      idrPeriod = idrPeriod_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setConsecutiveBFrameCount( uint32_t consecutiveBFrameCount_ ) VULKAN_HPP_NOEXCEPT
    {
      consecutiveBFrameCount = consecutiveBFrameCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setTemporalLayerCount( uint32_t temporalLayerCount_ ) VULKAN_HPP_NOEXCEPT
    {
      temporalLayerCount = temporalLayerCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (by reference and by pointer), so the wrapper can be
    // passed directly to the C API.
    operator VkVideoEncodeH264RateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264RateControlInfoKHR *>( this );
    }

    operator VkVideoEncodeH264RateControlInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264RateControlInfoKHR *>( this );
    }

    operator VkVideoEncodeH264RateControlInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH264RateControlInfoKHR *>( this );
    }

    operator VkVideoEncodeH264RateControlInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH264RateControlInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               VideoEncodeH264RateControlFlagsKHR const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, gopFrameCount, idrPeriod, consecutiveBFrameCount, temporalLayerCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // All members have usable comparison operators, so the defaulted spaceship operator suffices.
    auto operator<=>( VideoEncodeH264RateControlInfoKHR const & ) const = default;
#else
    bool operator==( VideoEncodeH264RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( gopFrameCount == rhs.gopFrameCount ) &&
             ( idrPeriod == rhs.idrPeriod ) && ( consecutiveBFrameCount == rhs.consecutiveBFrameCount ) && ( temporalLayerCount == rhs.temporalLayerCount );
#  endif
    }

    bool operator!=( VideoEncodeH264RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkVideoEncodeH264RateControlInfoKHR exactly; do not reorder.
    StructureType                      sType                  = StructureType::eVideoEncodeH264RateControlInfoKHR;
    const void *                       pNext                  = {};
    VideoEncodeH264RateControlFlagsKHR flags                  = {};
    uint32_t                           gopFrameCount          = {};
    uint32_t                           idrPeriod              = {};
    uint32_t                           consecutiveBFrameCount = {};
    uint32_t                           temporalLayerCount     = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and later: maps the native C type to its C++ wrapper type.
  template <>
  struct CppType<VkVideoEncodeH264RateControlInfoKHR>
  {
    using Type = VideoEncodeH264RateControlInfoKHR;
  };
#endif

  // Maps the eVideoEncodeH264RateControlInfoKHR sType value to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264RateControlInfoKHR>
  {
    using Type = VideoEncodeH264RateControlInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeH264RateControlLayerInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264RateControlLayerInfoKHR.html
  struct VideoEncodeH264RateControlLayerInfoKHR
  {
    // The equivalent native (C API) type; the wrapper is converted to/from it via reinterpret_cast,
    // so both types must stay layout-identical.
    using NativeType = VkVideoEncodeH264RateControlLayerInfoKHR;

    // Presumably consumed by StructureChain validation to reject duplicate entries in a pNext chain.
    static const bool                                  allowDuplicate = false;
    // The sType value identifying this structure in a pNext chain.
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH264RateControlLayerInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by its member initializer; every other field is settable.
    // Each use* Bool32 gates whether the corresponding min/max value applies.
    VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlLayerInfoKHR( Bool32                      useMinQp_        = {},
                                                                 VideoEncodeH264QpKHR        minQp_           = {},
                                                                 Bool32                      useMaxQp_        = {},
                                                                 VideoEncodeH264QpKHR        maxQp_           = {},
                                                                 Bool32                      useMaxFrameSize_ = {},
                                                                 VideoEncodeH264FrameSizeKHR maxFrameSize_    = {},
                                                                 const void *                pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , useMinQp{ useMinQp_ }
      , minQp{ minQp_ }
      , useMaxQp{ useMaxQp_ }
      , maxQp{ maxQp_ }
      , useMaxFrameSize{ useMaxFrameSize_ }
      , maxFrameSize{ maxFrameSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlLayerInfoKHR( VideoEncodeH264RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the two types share the same layout.
    VideoEncodeH264RateControlLayerInfoKHR( VkVideoEncodeH264RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264RateControlLayerInfoKHR( *reinterpret_cast<VideoEncodeH264RateControlLayerInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH264RateControlLayerInfoKHR & operator=( VideoEncodeH264RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct through the layout-compatible reinterpret_cast.
    VideoEncodeH264RateControlLayerInfoKHR & operator=( VkVideoEncodeH264RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH264RateControlLayerInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setUseMinQp( Bool32 useMinQp_ ) VULKAN_HPP_NOEXCEPT
    {
      useMinQp = useMinQp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setMinQp( VideoEncodeH264QpKHR const & minQp_ ) VULKAN_HPP_NOEXCEPT
    {
      minQp = minQp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setUseMaxQp( Bool32 useMaxQp_ ) VULKAN_HPP_NOEXCEPT
    {
      useMaxQp = useMaxQp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setMaxQp( VideoEncodeH264QpKHR const & maxQp_ ) VULKAN_HPP_NOEXCEPT
    {
      maxQp = maxQp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setUseMaxFrameSize( Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT
    {
      useMaxFrameSize = useMaxFrameSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setMaxFrameSize( VideoEncodeH264FrameSizeKHR const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT
    {
      maxFrameSize = maxFrameSize_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (by reference and by pointer), so the wrapper can be
    // passed directly to the C API.
    operator VkVideoEncodeH264RateControlLayerInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264RateControlLayerInfoKHR *>( this );
    }

    operator VkVideoEncodeH264RateControlLayerInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264RateControlLayerInfoKHR *>( this );
    }

    operator VkVideoEncodeH264RateControlLayerInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH264RateControlLayerInfoKHR *>( this );
    }

    operator VkVideoEncodeH264RateControlLayerInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH264RateControlLayerInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               Bool32 const &,
               VideoEncodeH264QpKHR const &,
               Bool32 const &,
               VideoEncodeH264QpKHR const &,
               Bool32 const &,
               VideoEncodeH264FrameSizeKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, useMinQp, minQp, useMaxQp, maxQp, useMaxFrameSize, maxFrameSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // All members have usable comparison operators, so the defaulted spaceship operator suffices.
    auto operator<=>( VideoEncodeH264RateControlLayerInfoKHR const & ) const = default;
#else
    bool operator==( VideoEncodeH264RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useMinQp == rhs.useMinQp ) && ( minQp == rhs.minQp ) && ( useMaxQp == rhs.useMaxQp ) &&
             ( maxQp == rhs.maxQp ) && ( useMaxFrameSize == rhs.useMaxFrameSize ) && ( maxFrameSize == rhs.maxFrameSize );
#  endif
    }

    bool operator!=( VideoEncodeH264RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors VkVideoEncodeH264RateControlLayerInfoKHR exactly; do not reorder.
    StructureType               sType           = StructureType::eVideoEncodeH264RateControlLayerInfoKHR;
    const void *                pNext           = {};
    Bool32                      useMinQp        = {};
    VideoEncodeH264QpKHR        minQp           = {};
    Bool32                      useMaxQp        = {};
    VideoEncodeH264QpKHR        maxQp           = {};
    Bool32                      useMaxFrameSize = {};
    VideoEncodeH264FrameSizeKHR maxFrameSize    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // C++20 and later: maps the native C type to its C++ wrapper type.
  template <>
  struct CppType<VkVideoEncodeH264RateControlLayerInfoKHR>
  {
    using Type = VideoEncodeH264RateControlLayerInfoKHR;
  };
#endif

  // Maps the eVideoEncodeH264RateControlLayerInfoKHR sType value to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264RateControlLayerInfoKHR>
  {
    using Type = VideoEncodeH264RateControlLayerInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeH264SessionCreateInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264SessionCreateInfoKHR.html
  struct VideoEncodeH264SessionCreateInfoKHR
  {
    using NativeType = VkVideoEncodeH264SessionCreateInfoKHR;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH264SessionCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionCreateInfoKHR( Bool32               useMaxLevelIdc_ = {},
                                                              StdVideoH264LevelIdc maxLevelIdc_    = {},
                                                              const void *         pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , useMaxLevelIdc{ useMaxLevelIdc_ }
      , maxLevelIdc{ maxLevelIdc_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionCreateInfoKHR( VideoEncodeH264SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct: the wrapper is layout-compatible, so reinterpreting is safe.
    VideoEncodeH264SessionCreateInfoKHR( VkVideoEncodeH264SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264SessionCreateInfoKHR( *reinterpret_cast<VideoEncodeH264SessionCreateInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH264SessionCreateInfoKHR & operator=( VideoEncodeH264SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, available even when the wrapper constructors are disabled.
    VideoEncodeH264SessionCreateInfoKHR & operator=( VkVideoEncodeH264SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH264SessionCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoKHR & setUseMaxLevelIdc( Bool32 useMaxLevelIdc_ ) VULKAN_HPP_NOEXCEPT
    {
      useMaxLevelIdc = useMaxLevelIdc_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoKHR & setMaxLevelIdc( StdVideoH264LevelIdc maxLevelIdc_ ) VULKAN_HPP_NOEXCEPT
    {
      maxLevelIdc = maxLevelIdc_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer, for passing to the C API.
    operator VkVideoEncodeH264SessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264SessionCreateInfoKHR *>( this );
    }

    operator VkVideoEncodeH264SessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264SessionCreateInfoKHR *>( this );
    }

    operator VkVideoEncodeH264SessionCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH264SessionCreateInfoKHR *>( this );
    }

    operator VkVideoEncodeH264SessionCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH264SessionCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic (reflection-style) access.
    std::tuple<StructureType const &, const void * const &, Bool32 const &, StdVideoH264LevelIdc const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, useMaxLevelIdc, maxLevelIdc );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Manual member-wise three-way comparison: maxLevelIdc is a C video-std type with no
    // comparison operators of its own, so it is compared bytewise via memcmp.
    std::strong_ordering operator<=>( VideoEncodeH264SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = useMaxLevelIdc <=> rhs.useMaxLevelIdc; cmp != 0 )
        return cmp;
      if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#endif

    // Equality mirrors operator<=>: member-wise, with a bytewise compare for maxLevelIdc.
    bool operator==( VideoEncodeH264SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useMaxLevelIdc == rhs.useMaxLevelIdc ) &&
             ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ) == 0 );
    }

    bool operator!=( VideoEncodeH264SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    StructureType        sType          = StructureType::eVideoEncodeH264SessionCreateInfoKHR;
    const void *         pNext          = {};
    Bool32               useMaxLevelIdc = {};
    StdVideoH264LevelIdc maxLevelIdc    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkVideoEncodeH264SessionCreateInfoKHR type to its C++ wrapper (C++20 and up).
  template <>
  struct CppType<VkVideoEncodeH264SessionCreateInfoKHR>
  {
    using Type = VideoEncodeH264SessionCreateInfoKHR;
  };
#endif

  // Maps StructureType::eVideoEncodeH264SessionCreateInfoKHR back to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264SessionCreateInfoKHR>
  {
    using Type = VideoEncodeH264SessionCreateInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeH264SessionParametersAddInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264SessionParametersAddInfoKHR.html
  struct VideoEncodeH264SessionParametersAddInfoKHR
  {
    using NativeType = VkVideoEncodeH264SessionParametersAddInfoKHR;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH264SessionParametersAddInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoKHR( uint32_t                                 stdSPSCount_ = {},
                                                                     const StdVideoH264SequenceParameterSet * pStdSPSs_    = {},
                                                                     uint32_t                                 stdPPSCount_ = {},
                                                                     const StdVideoH264PictureParameterSet *  pStdPPSs_    = {},
                                                                     const void *                             pNext_       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stdSPSCount{ stdSPSCount_ }
      , pStdSPSs{ pStdSPSs_ }
      , stdPPSCount{ stdPPSCount_ }
      , pStdPPSs{ pStdPPSs_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoKHR( VideoEncodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct: the wrapper is layout-compatible, so reinterpreting is safe.
    VideoEncodeH264SessionParametersAddInfoKHR( VkVideoEncodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264SessionParametersAddInfoKHR( *reinterpret_cast<VideoEncodeH264SessionParametersAddInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives the count/pointer pairs from array proxies.
    // The proxies are non-owning views; the pointed-to arrays must outlive this struct's use.
    VideoEncodeH264SessionParametersAddInfoKHR( ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & stdSPSs_,
                                                ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const &  stdPPSs_ = {},
                                                const void *                                                            pNext_   = nullptr )
      : pNext( pNext_ )
      , stdSPSCount( static_cast<uint32_t>( stdSPSs_.size() ) )
      , pStdSPSs( stdSPSs_.data() )
      , stdPPSCount( static_cast<uint32_t>( stdPPSs_.size() ) )
      , pStdPPSs( stdPPSs_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VideoEncodeH264SessionParametersAddInfoKHR & operator=( VideoEncodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, available even when the wrapper constructors are disabled.
    VideoEncodeH264SessionParametersAddInfoKHR & operator=( VkVideoEncodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH264SessionParametersAddInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoKHR & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT
    {
      stdSPSCount = stdSPSCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoKHR & setPStdSPSs( const StdVideoH264SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdSPSs = pStdSPSs_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both stdSPSCount and pStdSPSs from a single non-owning array proxy.
    VideoEncodeH264SessionParametersAddInfoKHR &
      setStdSPSs( ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT
    {
      stdSPSCount = static_cast<uint32_t>( stdSPSs_.size() );
      pStdSPSs    = stdSPSs_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoKHR & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT
    {
      stdPPSCount = stdPPSCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoKHR & setPStdPPSs( const StdVideoH264PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdPPSs = pStdPPSs_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets both stdPPSCount and pStdPPSs from a single non-owning array proxy.
    VideoEncodeH264SessionParametersAddInfoKHR &
      setStdPPSs( ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT
    {
      stdPPSCount = static_cast<uint32_t>( stdPPSs_.size() );
      pStdPPSs    = stdPPSs_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer, for passing to the C API.
    operator VkVideoEncodeH264SessionParametersAddInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264SessionParametersAddInfoKHR *>( this );
    }

    operator VkVideoEncodeH264SessionParametersAddInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264SessionParametersAddInfoKHR *>( this );
    }

    operator VkVideoEncodeH264SessionParametersAddInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH264SessionParametersAddInfoKHR *>( this );
    }

    operator VkVideoEncodeH264SessionParametersAddInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH264SessionParametersAddInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic (reflection-style) access.
    std::tuple<StructureType const &,
               const void * const &,
               uint32_t const &,
               const StdVideoH264SequenceParameterSet * const &,
               uint32_t const &,
               const StdVideoH264PictureParameterSet * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH264SessionParametersAddInfoKHR const & ) const = default;
#else
    // Member-wise equality; pointer members are compared by address, not by pointed-to content.
    bool operator==( VideoEncodeH264SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stdSPSCount == rhs.stdSPSCount ) && ( pStdSPSs == rhs.pStdSPSs ) &&
             ( stdPPSCount == rhs.stdPPSCount ) && ( pStdPPSs == rhs.pStdPPSs );
#  endif
    }

    bool operator!=( VideoEncodeH264SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                            sType       = StructureType::eVideoEncodeH264SessionParametersAddInfoKHR;
    const void *                             pNext       = {};
    uint32_t                                 stdSPSCount = {};
    const StdVideoH264SequenceParameterSet * pStdSPSs    = {};
    uint32_t                                 stdPPSCount = {};
    const StdVideoH264PictureParameterSet *  pStdPPSs    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkVideoEncodeH264SessionParametersAddInfoKHR type to its C++ wrapper (C++20 and up).
  template <>
  struct CppType<VkVideoEncodeH264SessionParametersAddInfoKHR>
  {
    using Type = VideoEncodeH264SessionParametersAddInfoKHR;
  };
#endif

  // Maps StructureType::eVideoEncodeH264SessionParametersAddInfoKHR back to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264SessionParametersAddInfoKHR>
  {
    using Type = VideoEncodeH264SessionParametersAddInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeH264SessionParametersCreateInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264SessionParametersCreateInfoKHR.html
  struct VideoEncodeH264SessionParametersCreateInfoKHR
  {
    using NativeType = VkVideoEncodeH264SessionParametersCreateInfoKHR;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH264SessionParametersCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersCreateInfoKHR( uint32_t                                           maxStdSPSCount_     = {},
                                                                        uint32_t                                           maxStdPPSCount_     = {},
                                                                        const VideoEncodeH264SessionParametersAddInfoKHR * pParametersAddInfo_ = {},
                                                                        const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxStdSPSCount{ maxStdSPSCount_ }
      , maxStdPPSCount{ maxStdPPSCount_ }
      , pParametersAddInfo{ pParametersAddInfo_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      VideoEncodeH264SessionParametersCreateInfoKHR( VideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct: the wrapper is layout-compatible, so reinterpreting is safe.
    VideoEncodeH264SessionParametersCreateInfoKHR( VkVideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264SessionParametersCreateInfoKHR( *reinterpret_cast<VideoEncodeH264SessionParametersCreateInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH264SessionParametersCreateInfoKHR & operator=( VideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, available even when the wrapper constructors are disabled.
    VideoEncodeH264SessionParametersCreateInfoKHR & operator=( VkVideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH264SessionParametersCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoKHR & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxStdSPSCount = maxStdSPSCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoKHR & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxStdPPSCount = maxStdPPSCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoKHR &
      setPParametersAddInfo( const VideoEncodeH264SessionParametersAddInfoKHR * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pParametersAddInfo = pParametersAddInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer, for passing to the C API.
    operator VkVideoEncodeH264SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264SessionParametersCreateInfoKHR *>( this );
    }

    operator VkVideoEncodeH264SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264SessionParametersCreateInfoKHR *>( this );
    }

    operator VkVideoEncodeH264SessionParametersCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH264SessionParametersCreateInfoKHR *>( this );
    }

    operator VkVideoEncodeH264SessionParametersCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH264SessionParametersCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic (reflection-style) access.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const VideoEncodeH264SessionParametersAddInfoKHR * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH264SessionParametersCreateInfoKHR const & ) const = default;
#else
    // Member-wise equality; pParametersAddInfo is compared by address, not by pointed-to content.
    bool operator==( VideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxStdSPSCount == rhs.maxStdSPSCount ) && ( maxStdPPSCount == rhs.maxStdPPSCount ) &&
             ( pParametersAddInfo == rhs.pParametersAddInfo );
#  endif
    }

    bool operator!=( VideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                                      sType              = StructureType::eVideoEncodeH264SessionParametersCreateInfoKHR;
    const void *                                       pNext              = {};
    uint32_t                                           maxStdSPSCount     = {};
    uint32_t                                           maxStdPPSCount     = {};
    const VideoEncodeH264SessionParametersAddInfoKHR * pParametersAddInfo = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkVideoEncodeH264SessionParametersCreateInfoKHR type to its C++ wrapper (C++20 and up).
  template <>
  struct CppType<VkVideoEncodeH264SessionParametersCreateInfoKHR>
  {
    using Type = VideoEncodeH264SessionParametersCreateInfoKHR;
  };
#endif

  // Maps StructureType::eVideoEncodeH264SessionParametersCreateInfoKHR back to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264SessionParametersCreateInfoKHR>
  {
    using Type = VideoEncodeH264SessionParametersCreateInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeH264SessionParametersFeedbackInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264SessionParametersFeedbackInfoKHR.html
  struct VideoEncodeH264SessionParametersFeedbackInfoKHR
  {
    using NativeType = VkVideoEncodeH264SessionParametersFeedbackInfoKHR;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH264SessionParametersFeedbackInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Note: unlike the create-info wrappers above, pNext is non-const and no setters are
    // generated for this struct — presumably it is a return/feedback structure; see the spec link above.
    VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersFeedbackInfoKHR( Bool32 hasStdSPSOverrides_ = {},
                                                                          Bool32 hasStdPPSOverrides_ = {},
                                                                          void * pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , hasStdSPSOverrides{ hasStdSPSOverrides_ }
      , hasStdPPSOverrides{ hasStdPPSOverrides_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      VideoEncodeH264SessionParametersFeedbackInfoKHR( VideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct: the wrapper is layout-compatible, so reinterpreting is safe.
    VideoEncodeH264SessionParametersFeedbackInfoKHR( VkVideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264SessionParametersFeedbackInfoKHR( *reinterpret_cast<VideoEncodeH264SessionParametersFeedbackInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH264SessionParametersFeedbackInfoKHR & operator=( VideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, available even when the wrapper constructors are disabled.
    VideoEncodeH264SessionParametersFeedbackInfoKHR & operator=( VkVideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH264SessionParametersFeedbackInfoKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the C struct, by reference and by pointer, for passing to the C API.
    operator VkVideoEncodeH264SessionParametersFeedbackInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264SessionParametersFeedbackInfoKHR *>( this );
    }

    operator VkVideoEncodeH264SessionParametersFeedbackInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264SessionParametersFeedbackInfoKHR *>( this );
    }

    operator VkVideoEncodeH264SessionParametersFeedbackInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH264SessionParametersFeedbackInfoKHR *>( this );
    }

    operator VkVideoEncodeH264SessionParametersFeedbackInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH264SessionParametersFeedbackInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic (reflection-style) access.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, hasStdSPSOverrides, hasStdPPSOverrides );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH264SessionParametersFeedbackInfoKHR const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( VideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hasStdSPSOverrides == rhs.hasStdSPSOverrides ) &&
             ( hasStdPPSOverrides == rhs.hasStdPPSOverrides );
#  endif
    }

    bool operator!=( VideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType              = StructureType::eVideoEncodeH264SessionParametersFeedbackInfoKHR;
    void *        pNext              = {};
    Bool32        hasStdSPSOverrides = {};
    Bool32        hasStdPPSOverrides = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkVideoEncodeH264SessionParametersFeedbackInfoKHR type to its C++ wrapper (C++20 and up).
  template <>
  struct CppType<VkVideoEncodeH264SessionParametersFeedbackInfoKHR>
  {
    using Type = VideoEncodeH264SessionParametersFeedbackInfoKHR;
  };
#endif

  // Maps StructureType::eVideoEncodeH264SessionParametersFeedbackInfoKHR back to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264SessionParametersFeedbackInfoKHR>
  {
    using Type = VideoEncodeH264SessionParametersFeedbackInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeH264SessionParametersGetInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264SessionParametersGetInfoKHR.html
  struct VideoEncodeH264SessionParametersGetInfoKHR
  {
    using NativeType = VkVideoEncodeH264SessionParametersGetInfoKHR;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH264SessionParametersGetInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersGetInfoKHR(
      Bool32 writeStdSPS_ = {}, Bool32 writeStdPPS_ = {}, uint32_t stdSPSId_ = {}, uint32_t stdPPSId_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , writeStdSPS{ writeStdSPS_ }
      , writeStdPPS{ writeStdPPS_ }
      , stdSPSId{ stdSPSId_ }
      , stdPPSId{ stdPPSId_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersGetInfoKHR( VideoEncodeH264SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct: the wrapper is layout-compatible, so reinterpreting is safe.
    VideoEncodeH264SessionParametersGetInfoKHR( VkVideoEncodeH264SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264SessionParametersGetInfoKHR( *reinterpret_cast<VideoEncodeH264SessionParametersGetInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH264SessionParametersGetInfoKHR & operator=( VideoEncodeH264SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assignment from the C struct, available even when the wrapper constructors are disabled.
    VideoEncodeH264SessionParametersGetInfoKHR & operator=( VkVideoEncodeH264SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH264SessionParametersGetInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersGetInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersGetInfoKHR & setWriteStdSPS( Bool32 writeStdSPS_ ) VULKAN_HPP_NOEXCEPT
    {
      writeStdSPS = writeStdSPS_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersGetInfoKHR & setWriteStdPPS( Bool32 writeStdPPS_ ) VULKAN_HPP_NOEXCEPT
    {
      writeStdPPS = writeStdPPS_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersGetInfoKHR & setStdSPSId( uint32_t stdSPSId_ ) VULKAN_HPP_NOEXCEPT
    {
      stdSPSId = stdSPSId_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersGetInfoKHR & setStdPPSId( uint32_t stdPPSId_ ) VULKAN_HPP_NOEXCEPT
    {
      stdPPSId = stdPPSId_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct, by reference and by pointer, for passing to the C API.
    operator VkVideoEncodeH264SessionParametersGetInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264SessionParametersGetInfoKHR *>( this );
    }

    operator VkVideoEncodeH264SessionParametersGetInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264SessionParametersGetInfoKHR *>( this );
    }

    operator VkVideoEncodeH264SessionParametersGetInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH264SessionParametersGetInfoKHR *>( this );
    }

    operator VkVideoEncodeH264SessionParametersGetInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH264SessionParametersGetInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic (reflection-style) access.
    std::tuple<StructureType const &, const void * const &, Bool32 const &, Bool32 const &, uint32_t const &, uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, writeStdSPS, writeStdPPS, stdSPSId, stdPPSId );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH264SessionParametersGetInfoKHR const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( VideoEncodeH264SessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( writeStdSPS == rhs.writeStdSPS ) && ( writeStdPPS == rhs.writeStdPPS ) &&
             ( stdSPSId == rhs.stdSPSId ) && ( stdPPSId == rhs.stdPPSId );
#  endif
    }

    bool operator!=( VideoEncodeH264SessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType       = StructureType::eVideoEncodeH264SessionParametersGetInfoKHR;
    const void *  pNext       = {};
    Bool32        writeStdSPS = {};
    Bool32        writeStdPPS = {};
    uint32_t      stdSPSId    = {};
    uint32_t      stdPPSId    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native VkVideoEncodeH264SessionParametersGetInfoKHR type to its C++ wrapper (C++20 and up).
  template <>
  struct CppType<VkVideoEncodeH264SessionParametersGetInfoKHR>
  {
    using Type = VideoEncodeH264SessionParametersGetInfoKHR;
  };
#endif

  // Maps StructureType::eVideoEncodeH264SessionParametersGetInfoKHR back to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264SessionParametersGetInfoKHR>
  {
    using Type = VideoEncodeH264SessionParametersGetInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeH265CapabilitiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265CapabilitiesKHR.html
  struct VideoEncodeH265CapabilitiesKHR
  {
    using NativeType = VkVideoEncodeH265CapabilitiesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH265CapabilitiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH265CapabilitiesKHR( VideoEncodeH265CapabilityFlagsKHR         flags_                               = {},
                                                         StdVideoH265LevelIdc                      maxLevelIdc_                         = {},
                                                         uint32_t                                  maxSliceSegmentCount_                = {},
                                                         Extent2D                                  maxTiles_                            = {},
                                                         VideoEncodeH265CtbSizeFlagsKHR            ctbSizes_                            = {},
                                                         VideoEncodeH265TransformBlockSizeFlagsKHR transformBlockSizes_                 = {},
                                                         uint32_t                                  maxPPictureL0ReferenceCount_         = {},
                                                         uint32_t                                  maxBPictureL0ReferenceCount_         = {},
                                                         uint32_t                                  maxL1ReferenceCount_                 = {},
                                                         uint32_t                                  maxSubLayerCount_                    = {},
                                                         Bool32                                    expectDyadicTemporalSubLayerPattern_ = {},
                                                         int32_t                                   minQp_                               = {},
                                                         int32_t                                   maxQp_                               = {},
                                                         Bool32                                    prefersGopRemainingFrames_           = {},
                                                         Bool32                                    requiresGopRemainingFrames_          = {},
                                                         VideoEncodeH265StdFlagsKHR                stdSyntaxFlags_                      = {},
                                                         void *                                    pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , maxLevelIdc{ maxLevelIdc_ }
      , maxSliceSegmentCount{ maxSliceSegmentCount_ }
      , maxTiles{ maxTiles_ }
      , ctbSizes{ ctbSizes_ }
      , transformBlockSizes{ transformBlockSizes_ }
      , maxPPictureL0ReferenceCount{ maxPPictureL0ReferenceCount_ }
      , maxBPictureL0ReferenceCount{ maxBPictureL0ReferenceCount_ }
      , maxL1ReferenceCount{ maxL1ReferenceCount_ }
      , maxSubLayerCount{ maxSubLayerCount_ }
      , expectDyadicTemporalSubLayerPattern{ expectDyadicTemporalSubLayerPattern_ }
      , minQp{ minQp_ }
      , maxQp{ maxQp_ }
      , prefersGopRemainingFrames{ prefersGopRemainingFrames_ }
      , requiresGopRemainingFrames{ requiresGopRemainingFrames_ }
      , stdSyntaxFlags{ stdSyntaxFlags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH265CapabilitiesKHR( VideoEncodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265CapabilitiesKHR( VkVideoEncodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH265CapabilitiesKHR( *reinterpret_cast<VideoEncodeH265CapabilitiesKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH265CapabilitiesKHR & operator=( VideoEncodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    VideoEncodeH265CapabilitiesKHR & operator=( VkVideoEncodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH265CapabilitiesKHR const *>( &rhs );
      return *this;
    }

    operator VkVideoEncodeH265CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265CapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeH265CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265CapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeH265CapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH265CapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeH265CapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH265CapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &,
               void * const &,
               VideoEncodeH265CapabilityFlagsKHR const &,
               StdVideoH265LevelIdc const &,
               uint32_t const &,
               Extent2D const &,
               VideoEncodeH265CtbSizeFlagsKHR const &,
               VideoEncodeH265TransformBlockSizeFlagsKHR const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               Bool32 const &,
               int32_t const &,
               int32_t const &,
               Bool32 const &,
               Bool32 const &,
               VideoEncodeH265StdFlagsKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       flags,
                       maxLevelIdc,
                       maxSliceSegmentCount,
                       maxTiles,
                       ctbSizes,
                       transformBlockSizes,
                       maxPPictureL0ReferenceCount,
                       maxBPictureL0ReferenceCount,
                       maxL1ReferenceCount,
                       maxSubLayerCount,
                       expectDyadicTemporalSubLayerPattern,
                       minQp,
                       maxQp,
                       prefersGopRemainingFrames,
                       requiresGopRemainingFrames,
                       stdSyntaxFlags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    std::strong_ordering operator<=>( VideoEncodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
        return cmp;
      if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = maxSliceSegmentCount <=> rhs.maxSliceSegmentCount; cmp != 0 )
        return cmp;
      if ( auto cmp = maxTiles <=> rhs.maxTiles; cmp != 0 )
        return cmp;
      if ( auto cmp = ctbSizes <=> rhs.ctbSizes; cmp != 0 )
        return cmp;
      if ( auto cmp = transformBlockSizes <=> rhs.transformBlockSizes; cmp != 0 )
        return cmp;
      if ( auto cmp = maxPPictureL0ReferenceCount <=> rhs.maxPPictureL0ReferenceCount; cmp != 0 )
        return cmp;
      if ( auto cmp = maxBPictureL0ReferenceCount <=> rhs.maxBPictureL0ReferenceCount; cmp != 0 )
        return cmp;
      if ( auto cmp = maxL1ReferenceCount <=> rhs.maxL1ReferenceCount; cmp != 0 )
        return cmp;
      if ( auto cmp = maxSubLayerCount <=> rhs.maxSubLayerCount; cmp != 0 )
        return cmp;
      if ( auto cmp = expectDyadicTemporalSubLayerPattern <=> rhs.expectDyadicTemporalSubLayerPattern; cmp != 0 )
        return cmp;
      if ( auto cmp = minQp <=> rhs.minQp; cmp != 0 )
        return cmp;
      if ( auto cmp = maxQp <=> rhs.maxQp; cmp != 0 )
        return cmp;
      if ( auto cmp = prefersGopRemainingFrames <=> rhs.prefersGopRemainingFrames; cmp != 0 )
        return cmp;
      if ( auto cmp = requiresGopRemainingFrames <=> rhs.requiresGopRemainingFrames; cmp != 0 )
        return cmp;
      if ( auto cmp = stdSyntaxFlags <=> rhs.stdSyntaxFlags; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    bool operator==( VideoEncodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ) == 0 ) && ( maxSliceSegmentCount == rhs.maxSliceSegmentCount ) &&
             ( maxTiles == rhs.maxTiles ) && ( ctbSizes == rhs.ctbSizes ) && ( transformBlockSizes == rhs.transformBlockSizes ) &&
             ( maxPPictureL0ReferenceCount == rhs.maxPPictureL0ReferenceCount ) && ( maxBPictureL0ReferenceCount == rhs.maxBPictureL0ReferenceCount ) &&
             ( maxL1ReferenceCount == rhs.maxL1ReferenceCount ) && ( maxSubLayerCount == rhs.maxSubLayerCount ) &&
             ( expectDyadicTemporalSubLayerPattern == rhs.expectDyadicTemporalSubLayerPattern ) && ( minQp == rhs.minQp ) && ( maxQp == rhs.maxQp ) &&
             ( prefersGopRemainingFrames == rhs.prefersGopRemainingFrames ) && ( requiresGopRemainingFrames == rhs.requiresGopRemainingFrames ) &&
             ( stdSyntaxFlags == rhs.stdSyntaxFlags );
    }

    bool operator!=( VideoEncodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    StructureType                             sType                               = StructureType::eVideoEncodeH265CapabilitiesKHR;
    void *                                    pNext                               = {};
    VideoEncodeH265CapabilityFlagsKHR         flags                               = {};
    StdVideoH265LevelIdc                      maxLevelIdc                         = {};
    uint32_t                                  maxSliceSegmentCount                = {};
    Extent2D                                  maxTiles                            = {};
    VideoEncodeH265CtbSizeFlagsKHR            ctbSizes                            = {};
    VideoEncodeH265TransformBlockSizeFlagsKHR transformBlockSizes                 = {};
    uint32_t                                  maxPPictureL0ReferenceCount         = {};
    uint32_t                                  maxBPictureL0ReferenceCount         = {};
    uint32_t                                  maxL1ReferenceCount                 = {};
    uint32_t                                  maxSubLayerCount                    = {};
    Bool32                                    expectDyadicTemporalSubLayerPattern = {};
    int32_t                                   minQp                               = {};
    int32_t                                   maxQp                               = {};
    Bool32                                    prefersGopRemainingFrames           = {};
    Bool32                                    requiresGopRemainingFrames          = {};
    VideoEncodeH265StdFlagsKHR                stdSyntaxFlags                      = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct VkVideoEncodeH265CapabilitiesKHR to its C++ wrapper (C++20 CppType<> lookup).
  template <>
  struct CppType<VkVideoEncodeH265CapabilitiesKHR>
  {
    using Type = VideoEncodeH265CapabilitiesKHR;
  };
#endif

  // Maps StructureType::eVideoEncodeH265CapabilitiesKHR to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH265CapabilitiesKHR>
  {
    using Type = VideoEncodeH265CapabilitiesKHR;
  };

  // wrapper struct for struct VkVideoEncodeH265DpbSlotInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265DpbSlotInfoKHR.html
  struct VideoEncodeH265DpbSlotInfoKHR
  {
    using NativeType = VkVideoEncodeH265DpbSlotInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH265DpbSlotInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed, all other fields default to null.
    VULKAN_HPP_CONSTEXPR VideoEncodeH265DpbSlotInfoKHR( const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo_ = {},
                                                        const void *                            pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , pStdReferenceInfo( pStdReferenceInfo_ )
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH265DpbSlotInfoKHR( VideoEncodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the wrapper is layout-compatible with it.
    VideoEncodeH265DpbSlotInfoKHR( VkVideoEncodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH265DpbSlotInfoKHR( *reinterpret_cast<VideoEncodeH265DpbSlotInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH265DpbSlotInfoKHR & operator=( VideoEncodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    VideoEncodeH265DpbSlotInfoKHR & operator=( VkVideoEncodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      return *this = *reinterpret_cast<VideoEncodeH265DpbSlotInfoKHR const *>( &rhs );
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoKHR &
      setPStdReferenceInfo( const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdReferenceInfo = pStdReferenceInfo_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by pointer and by reference.
    operator VkVideoEncodeH265DpbSlotInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH265DpbSlotInfoKHR *>( this );
    }

    operator VkVideoEncodeH265DpbSlotInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH265DpbSlotInfoKHR *>( this );
    }

    operator VkVideoEncodeH265DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265DpbSlotInfoKHR *>( this );
    }

    operator VkVideoEncodeH265DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265DpbSlotInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic code.
    std::tuple<StructureType const &, const void * const &, const StdVideoEncodeH265ReferenceInfo * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pStdReferenceInfo );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH265DpbSlotInfoKHR const & ) const = default;
#else
    // Member-wise equality; pointer members are compared by address.
    bool operator==( VideoEncodeH265DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( pStdReferenceInfo == rhs.pStdReferenceInfo ) && ( pNext == rhs.pNext ) && ( sType == rhs.sType );
#  endif
    }

    bool operator!=( VideoEncodeH265DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType                           sType             = StructureType::eVideoEncodeH265DpbSlotInfoKHR;
    const void *                            pNext             = {};
    const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct VkVideoEncodeH265DpbSlotInfoKHR to its C++ wrapper (C++20 CppType<> lookup).
  template <>
  struct CppType<VkVideoEncodeH265DpbSlotInfoKHR>
  {
    using Type = VideoEncodeH265DpbSlotInfoKHR;
  };
#endif

  // Maps StructureType::eVideoEncodeH265DpbSlotInfoKHR to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH265DpbSlotInfoKHR>
  {
    using Type = VideoEncodeH265DpbSlotInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeH265FrameSizeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265FrameSizeKHR.html
  struct VideoEncodeH265FrameSizeKHR
  {
    using NativeType = VkVideoEncodeH265FrameSizeKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; every frame size defaults to zero.
    VULKAN_HPP_CONSTEXPR VideoEncodeH265FrameSizeKHR( uint32_t frameISize_ = {}, uint32_t framePSize_ = {}, uint32_t frameBSize_ = {} ) VULKAN_HPP_NOEXCEPT
      : frameISize( frameISize_ )
      , framePSize( framePSize_ )
      , frameBSize( frameBSize_ )
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH265FrameSizeKHR( VideoEncodeH265FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the wrapper is layout-compatible with it.
    VideoEncodeH265FrameSizeKHR( VkVideoEncodeH265FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH265FrameSizeKHR( *reinterpret_cast<VideoEncodeH265FrameSizeKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH265FrameSizeKHR & operator=( VideoEncodeH265FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    VideoEncodeH265FrameSizeKHR & operator=( VkVideoEncodeH265FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      return *this = *reinterpret_cast<VideoEncodeH265FrameSizeKHR const *>( &rhs );
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per member.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeKHR & setFrameISize( uint32_t frameISize_ ) VULKAN_HPP_NOEXCEPT
    {
      frameISize = frameISize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeKHR & setFramePSize( uint32_t framePSize_ ) VULKAN_HPP_NOEXCEPT
    {
      framePSize = framePSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeKHR & setFrameBSize( uint32_t frameBSize_ ) VULKAN_HPP_NOEXCEPT
    {
      frameBSize = frameBSize_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by pointer and by reference.
    operator VkVideoEncodeH265FrameSizeKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH265FrameSizeKHR *>( this );
    }

    operator VkVideoEncodeH265FrameSizeKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH265FrameSizeKHR *>( this );
    }

    operator VkVideoEncodeH265FrameSizeKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265FrameSizeKHR *>( this );
    }

    operator VkVideoEncodeH265FrameSizeKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265FrameSizeKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic code.
    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( frameISize, framePSize, frameBSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH265FrameSizeKHR const & ) const = default;
#else
    // Member-wise equality.
    bool operator==( VideoEncodeH265FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( frameBSize == rhs.frameBSize ) && ( framePSize == rhs.framePSize ) && ( frameISize == rhs.frameISize );
#  endif
    }

    bool operator!=( VideoEncodeH265FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    uint32_t frameISize = {};
    uint32_t framePSize = {};
    uint32_t frameBSize = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct VkVideoEncodeH265FrameSizeKHR to its C++ wrapper (C++20 CppType<> lookup).
  // No StructureType specialization: this struct has no sType member.
  template <>
  struct CppType<VkVideoEncodeH265FrameSizeKHR>
  {
    using Type = VideoEncodeH265FrameSizeKHR;
  };
#endif

  // wrapper struct for struct VkVideoEncodeH265GopRemainingFrameInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265GopRemainingFrameInfoKHR.html
  struct VideoEncodeH265GopRemainingFrameInfoKHR
  {
    using NativeType = VkVideoEncodeH265GopRemainingFrameInfoKHR;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH265GopRemainingFrameInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed, all counts default to zero.
    VULKAN_HPP_CONSTEXPR VideoEncodeH265GopRemainingFrameInfoKHR( Bool32       useGopRemainingFrames_ = {},
                                                                  uint32_t     gopRemainingI_         = {},
                                                                  uint32_t     gopRemainingP_         = {},
                                                                  uint32_t     gopRemainingB_         = {},
                                                                  const void * pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , useGopRemainingFrames{ useGopRemainingFrames_ }
      , gopRemainingI{ gopRemainingI_ }
      , gopRemainingP{ gopRemainingP_ }
      , gopRemainingB{ gopRemainingB_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH265GopRemainingFrameInfoKHR( VideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the wrapper is layout-compatible with it.
    VideoEncodeH265GopRemainingFrameInfoKHR( VkVideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH265GopRemainingFrameInfoKHR( *reinterpret_cast<VideoEncodeH265GopRemainingFrameInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH265GopRemainingFrameInfoKHR & operator=( VideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    VideoEncodeH265GopRemainingFrameInfoKHR & operator=( VkVideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH265GopRemainingFrameInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265GopRemainingFrameInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265GopRemainingFrameInfoKHR & setUseGopRemainingFrames( Bool32 useGopRemainingFrames_ ) VULKAN_HPP_NOEXCEPT
    {
      useGopRemainingFrames = useGopRemainingFrames_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265GopRemainingFrameInfoKHR & setGopRemainingI( uint32_t gopRemainingI_ ) VULKAN_HPP_NOEXCEPT
    {
      gopRemainingI = gopRemainingI_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265GopRemainingFrameInfoKHR & setGopRemainingP( uint32_t gopRemainingP_ ) VULKAN_HPP_NOEXCEPT
    {
      gopRemainingP = gopRemainingP_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265GopRemainingFrameInfoKHR & setGopRemainingB( uint32_t gopRemainingB_ ) VULKAN_HPP_NOEXCEPT
    {
      gopRemainingB = gopRemainingB_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkVideoEncodeH265GopRemainingFrameInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265GopRemainingFrameInfoKHR *>( this );
    }

    operator VkVideoEncodeH265GopRemainingFrameInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265GopRemainingFrameInfoKHR *>( this );
    }

    operator VkVideoEncodeH265GopRemainingFrameInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH265GopRemainingFrameInfoKHR *>( this );
    }

    operator VkVideoEncodeH265GopRemainingFrameInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH265GopRemainingFrameInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic code.
    std::tuple<StructureType const &, const void * const &, Bool32 const &, uint32_t const &, uint32_t const &, uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, useGopRemainingFrames, gopRemainingI, gopRemainingP, gopRemainingB );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH265GopRemainingFrameInfoKHR const & ) const = default;
#else
    // Member-wise equality; pNext is compared as a raw pointer value.
    bool operator==( VideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useGopRemainingFrames == rhs.useGopRemainingFrames ) &&
             ( gopRemainingI == rhs.gopRemainingI ) && ( gopRemainingP == rhs.gopRemainingP ) && ( gopRemainingB == rhs.gopRemainingB );
#  endif
    }

    bool operator!=( VideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                 = StructureType::eVideoEncodeH265GopRemainingFrameInfoKHR;
    const void *  pNext                 = {};
    Bool32        useGopRemainingFrames = {};
    uint32_t      gopRemainingI         = {};
    uint32_t      gopRemainingP         = {};
    uint32_t      gopRemainingB         = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct VkVideoEncodeH265GopRemainingFrameInfoKHR to its C++ wrapper (C++20 CppType<> lookup).
  template <>
  struct CppType<VkVideoEncodeH265GopRemainingFrameInfoKHR>
  {
    using Type = VideoEncodeH265GopRemainingFrameInfoKHR;
  };
#endif

  // Maps StructureType::eVideoEncodeH265GopRemainingFrameInfoKHR to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH265GopRemainingFrameInfoKHR>
  {
    using Type = VideoEncodeH265GopRemainingFrameInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeH265NaluSliceSegmentInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265NaluSliceSegmentInfoKHR.html
  struct VideoEncodeH265NaluSliceSegmentInfoKHR
  {
    using NativeType = VkVideoEncodeH265NaluSliceSegmentInfoKHR;

    // This structure may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH265NaluSliceSegmentInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed, all other fields default to zero/null.
    VULKAN_HPP_CONSTEXPR VideoEncodeH265NaluSliceSegmentInfoKHR( int32_t                                      constantQp_             = {},
                                                                 const StdVideoEncodeH265SliceSegmentHeader * pStdSliceSegmentHeader_ = {},
                                                                 const void *                                 pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , constantQp{ constantQp_ }
      , pStdSliceSegmentHeader{ pStdSliceSegmentHeader_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH265NaluSliceSegmentInfoKHR( VideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the wrapper is layout-compatible with it.
    VideoEncodeH265NaluSliceSegmentInfoKHR( VkVideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH265NaluSliceSegmentInfoKHR( *reinterpret_cast<VideoEncodeH265NaluSliceSegmentInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH265NaluSliceSegmentInfoKHR & operator=( VideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct.
    VideoEncodeH265NaluSliceSegmentInfoKHR & operator=( VkVideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH265NaluSliceSegmentInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoKHR & setConstantQp( int32_t constantQp_ ) VULKAN_HPP_NOEXCEPT
    {
      constantQp = constantQp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoKHR &
      setPStdSliceSegmentHeader( const StdVideoEncodeH265SliceSegmentHeader * pStdSliceSegmentHeader_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdSliceSegmentHeader = pStdSliceSegmentHeader_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type, by reference and by pointer.
    operator VkVideoEncodeH265NaluSliceSegmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265NaluSliceSegmentInfoKHR *>( this );
    }

    operator VkVideoEncodeH265NaluSliceSegmentInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265NaluSliceSegmentInfoKHR *>( this );
    }

    operator VkVideoEncodeH265NaluSliceSegmentInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH265NaluSliceSegmentInfoKHR *>( this );
    }

    operator VkVideoEncodeH265NaluSliceSegmentInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH265NaluSliceSegmentInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for generic code.
    std::tuple<StructureType const &, const void * const &, int32_t const &, const StdVideoEncodeH265SliceSegmentHeader * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, constantQp, pStdSliceSegmentHeader );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH265NaluSliceSegmentInfoKHR const & ) const = default;
#else
    // Member-wise equality; pointer members are compared by address.
    bool operator==( VideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( constantQp == rhs.constantQp ) && ( pStdSliceSegmentHeader == rhs.pStdSliceSegmentHeader );
#  endif
    }

    bool operator!=( VideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                                sType                  = StructureType::eVideoEncodeH265NaluSliceSegmentInfoKHR;
    const void *                                 pNext                  = {};
    int32_t                                      constantQp             = {};
    const StdVideoEncodeH265SliceSegmentHeader * pStdSliceSegmentHeader = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct VkVideoEncodeH265NaluSliceSegmentInfoKHR to its C++ wrapper (C++20 CppType<> lookup).
  template <>
  struct CppType<VkVideoEncodeH265NaluSliceSegmentInfoKHR>
  {
    using Type = VideoEncodeH265NaluSliceSegmentInfoKHR;
  };
#endif

  // Maps StructureType::eVideoEncodeH265NaluSliceSegmentInfoKHR to its C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH265NaluSliceSegmentInfoKHR>
  {
    using Type = VideoEncodeH265NaluSliceSegmentInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeH265PictureInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265PictureInfoKHR.html
  struct VideoEncodeH265PictureInfoKHR
  {
    using NativeType = VkVideoEncodeH265PictureInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH265PictureInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH265PictureInfoKHR( uint32_t                                       naluSliceSegmentEntryCount_ = {},
                                                        const VideoEncodeH265NaluSliceSegmentInfoKHR * pNaluSliceSegmentEntries_   = {},
                                                        const StdVideoEncodeH265PictureInfo *          pStdPictureInfo_            = {},
                                                        const void *                                   pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , naluSliceSegmentEntryCount{ naluSliceSegmentEntryCount_ }
      , pNaluSliceSegmentEntries{ pNaluSliceSegmentEntries_ }
      , pStdPictureInfo{ pStdPictureInfo_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH265PictureInfoKHR( VideoEncodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265PictureInfoKHR( VkVideoEncodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH265PictureInfoKHR( *reinterpret_cast<VideoEncodeH265PictureInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH265PictureInfoKHR( ArrayProxyNoTemporaries<const VideoEncodeH265NaluSliceSegmentInfoKHR> const & naluSliceSegmentEntries_,
                                   const StdVideoEncodeH265PictureInfo *                                         pStdPictureInfo_ = {},
                                   const void *                                                                  pNext_           = nullptr )
      : pNext( pNext_ )
      , naluSliceSegmentEntryCount( static_cast<uint32_t>( naluSliceSegmentEntries_.size() ) )
      , pNaluSliceSegmentEntries( naluSliceSegmentEntries_.data() )
      , pStdPictureInfo( pStdPictureInfo_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VideoEncodeH265PictureInfoKHR & operator=( VideoEncodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    VideoEncodeH265PictureInfoKHR & operator=( VkVideoEncodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH265PictureInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265PictureInfoKHR & setNaluSliceSegmentEntryCount( uint32_t naluSliceSegmentEntryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      naluSliceSegmentEntryCount = naluSliceSegmentEntryCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265PictureInfoKHR &
      setPNaluSliceSegmentEntries( const VideoEncodeH265NaluSliceSegmentInfoKHR * pNaluSliceSegmentEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      pNaluSliceSegmentEntries = pNaluSliceSegmentEntries_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH265PictureInfoKHR &
      setNaluSliceSegmentEntries( ArrayProxyNoTemporaries<const VideoEncodeH265NaluSliceSegmentInfoKHR> const & naluSliceSegmentEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      naluSliceSegmentEntryCount = static_cast<uint32_t>( naluSliceSegmentEntries_.size() );
      pNaluSliceSegmentEntries   = naluSliceSegmentEntries_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265PictureInfoKHR & setPStdPictureInfo( const StdVideoEncodeH265PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdPictureInfo = pStdPictureInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkVideoEncodeH265PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265PictureInfoKHR *>( this );
    }

    operator VkVideoEncodeH265PictureInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265PictureInfoKHR *>( this );
    }

    operator VkVideoEncodeH265PictureInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH265PictureInfoKHR *>( this );
    }

    operator VkVideoEncodeH265PictureInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH265PictureInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: const references to all members in declaration order, used e.g. by
    // the reflect-based operator== below.
    std::tuple<StructureType const &,
               const void * const &,
               uint32_t const &,
               const VideoEncodeH265NaluSliceSegmentInfoKHR * const &,
               const StdVideoEncodeH265PictureInfo * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, naluSliceSegmentEntryCount, pNaluSliceSegmentEntries, pStdPictureInfo );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Memberwise comparison; note pointer members compare by address (shallow), not by pointee.
    auto operator<=>( VideoEncodeH265PictureInfoKHR const & ) const = default;
#else
    bool operator==( VideoEncodeH265PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      // Shallow comparison: pointer members are compared by address only.
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( naluSliceSegmentEntryCount == rhs.naluSliceSegmentEntryCount ) &&
             ( pNaluSliceSegmentEntries == rhs.pNaluSliceSegmentEntries ) && ( pStdPictureInfo == rhs.pStdPictureInfo );
#  endif
    }

    bool operator!=( VideoEncodeH265PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType must keep this value for the C API to recognize the structure.
    StructureType                                  sType                      = StructureType::eVideoEncodeH265PictureInfoKHR;
    const void *                                   pNext                      = {};
    uint32_t                                       naluSliceSegmentEntryCount = {};
    // Non-owning pointers; the referenced data must outlive any use of this struct.
    const VideoEncodeH265NaluSliceSegmentInfoKHR * pNaluSliceSegmentEntries   = {};
    const StdVideoEncodeH265PictureInfo *          pStdPictureInfo            = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (only declared for C++20 and newer).
  template <>
  struct CppType<VkVideoEncodeH265PictureInfoKHR>
  {
    using Type = VideoEncodeH265PictureInfoKHR;
  };
#endif

  // Trait mapping the StructureType enumerant to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH265PictureInfoKHR>
  {
    using Type = VideoEncodeH265PictureInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeH265ProfileInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265ProfileInfoKHR.html
  struct VideoEncodeH265ProfileInfoKHR
  {
    // Equivalent C struct; this wrapper is reinterpret_cast to/from it below, so member
    // order and types must stay in sync with the C definition.
    using NativeType = VkVideoEncodeH265ProfileInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH265ProfileInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // All members value-initialized by default; sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR VideoEncodeH265ProfileInfoKHR( StdVideoH265ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stdProfileIdc{ stdProfileIdc_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH265ProfileInfoKHR( VideoEncodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct via layout punning.
    VideoEncodeH265ProfileInfoKHR( VkVideoEncodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH265ProfileInfoKHR( *reinterpret_cast<VideoEncodeH265ProfileInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH265ProfileInfoKHR & operator=( VideoEncodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the same layout punning.
    VideoEncodeH265ProfileInfoKHR & operator=( VkVideoEncodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH265ProfileInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ProfileInfoKHR & setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
    {
      stdProfileIdc = stdProfileIdc_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions so the wrapper can be passed directly to the C API.
    operator VkVideoEncodeH265ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265ProfileInfoKHR *>( this );
    }

    operator VkVideoEncodeH265ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265ProfileInfoKHR *>( this );
    }

    operator VkVideoEncodeH265ProfileInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH265ProfileInfoKHR *>( this );
    }

    operator VkVideoEncodeH265ProfileInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH265ProfileInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: const references to all members in declaration order.
    std::tuple<StructureType const &, const void * const &, StdVideoH265ProfileIdc const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stdProfileIdc );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Hand-written (not defaulted) three-way comparison: StdVideoH265ProfileIdc is an opaque
    // C struct without comparison operators, so it is ordered bytewise via memcmp.
    std::strong_ordering operator<=>( VideoEncodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#endif

    // operator== is provided unconditionally (not derived from a defaulted <=>) because the
    // opaque Std struct must be compared bytewise.
    bool operator==( VideoEncodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 );
    }

    bool operator!=( VideoEncodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // sType must keep this value for the C API to recognize the structure.
    StructureType          sType         = StructureType::eVideoEncodeH265ProfileInfoKHR;
    const void *           pNext         = {};
    StdVideoH265ProfileIdc stdProfileIdc = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (only declared for C++20 and newer).
  template <>
  struct CppType<VkVideoEncodeH265ProfileInfoKHR>
  {
    using Type = VideoEncodeH265ProfileInfoKHR;
  };
#endif

  // Trait mapping the StructureType enumerant to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH265ProfileInfoKHR>
  {
    using Type = VideoEncodeH265ProfileInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeH265QpKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265QpKHR.html
  struct VideoEncodeH265QpKHR
  {
    // Equivalent C struct; the wrapper is cast to/from it, so layout must match exactly.
    using NativeType = VkVideoEncodeH265QpKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // QP values for I, P and B pictures (per the member names); all zero by default.
    VULKAN_HPP_CONSTEXPR VideoEncodeH265QpKHR( int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {} ) VULKAN_HPP_NOEXCEPT
      : qpI{ qpI_ }
      , qpP{ qpP_ }
      , qpB{ qpB_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH265QpKHR( VideoEncodeH265QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct via layout punning.
    VideoEncodeH265QpKHR( VkVideoEncodeH265QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH265QpKHR( *reinterpret_cast<VideoEncodeH265QpKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH265QpKHR & operator=( VideoEncodeH265QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via layout punning.
    VideoEncodeH265QpKHR & operator=( VkVideoEncodeH265QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH265QpKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpKHR & setQpI( int32_t qpI_ ) VULKAN_HPP_NOEXCEPT
    {
      qpI = qpI_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpKHR & setQpP( int32_t qpP_ ) VULKAN_HPP_NOEXCEPT
    {
      qpP = qpP_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpKHR & setQpB( int32_t qpB_ ) VULKAN_HPP_NOEXCEPT
    {
      qpB = qpB_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions so the wrapper can be passed directly to the C API.
    operator VkVideoEncodeH265QpKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265QpKHR *>( this );
    }

    operator VkVideoEncodeH265QpKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265QpKHR *>( this );
    }

    operator VkVideoEncodeH265QpKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH265QpKHR *>( this );
    }

    operator VkVideoEncodeH265QpKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH265QpKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: const references to all members in declaration order.
    std::tuple<int32_t const &, int32_t const &, int32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( qpI, qpP, qpB );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Plain int members, so memberwise defaulted comparison is exact.
    auto operator<=>( VideoEncodeH265QpKHR const & ) const = default;
#else
    bool operator==( VideoEncodeH265QpKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( qpI == rhs.qpI ) && ( qpP == rhs.qpP ) && ( qpB == rhs.qpB );
#  endif
    }

    bool operator!=( VideoEncodeH265QpKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    int32_t qpI = {};
    int32_t qpP = {};
    int32_t qpB = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (only declared for C++20 and newer).
  template <>
  struct CppType<VkVideoEncodeH265QpKHR>
  {
    using Type = VideoEncodeH265QpKHR;
  };
#endif

  // wrapper struct for struct VkVideoEncodeH265QualityLevelPropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265QualityLevelPropertiesKHR.html
  struct VideoEncodeH265QualityLevelPropertiesKHR
  {
    // Equivalent C struct; this wrapper is reinterpret_cast to/from it, so layouts must match.
    using NativeType = VkVideoEncodeH265QualityLevelPropertiesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH265QualityLevelPropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // All members value-initialized by default. Note the non-const pNext, and that no setters
    // are generated for this struct — presumably it is filled in by the implementation; confirm
    // against the VkVideoEncodeH265QualityLevelPropertiesKHR spec page.
    VULKAN_HPP_CONSTEXPR VideoEncodeH265QualityLevelPropertiesKHR( VideoEncodeH265RateControlFlagsKHR preferredRateControlFlags_       = {},
                                                                   uint32_t                           preferredGopFrameCount_          = {},
                                                                   uint32_t                           preferredIdrPeriod_              = {},
                                                                   uint32_t                           preferredConsecutiveBFrameCount_ = {},
                                                                   uint32_t                           preferredSubLayerCount_          = {},
                                                                   VideoEncodeH265QpKHR               preferredConstantQp_             = {},
                                                                   uint32_t                           preferredMaxL0ReferenceCount_    = {},
                                                                   uint32_t                           preferredMaxL1ReferenceCount_    = {},
                                                                   void *                             pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , preferredRateControlFlags{ preferredRateControlFlags_ }
      , preferredGopFrameCount{ preferredGopFrameCount_ }
      , preferredIdrPeriod{ preferredIdrPeriod_ }
      , preferredConsecutiveBFrameCount{ preferredConsecutiveBFrameCount_ }
      , preferredSubLayerCount{ preferredSubLayerCount_ }
      , preferredConstantQp{ preferredConstantQp_ }
      , preferredMaxL0ReferenceCount{ preferredMaxL0ReferenceCount_ }
      , preferredMaxL1ReferenceCount{ preferredMaxL1ReferenceCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH265QualityLevelPropertiesKHR( VideoEncodeH265QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct via layout punning.
    VideoEncodeH265QualityLevelPropertiesKHR( VkVideoEncodeH265QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH265QualityLevelPropertiesKHR( *reinterpret_cast<VideoEncodeH265QualityLevelPropertiesKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH265QualityLevelPropertiesKHR & operator=( VideoEncodeH265QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via layout punning.
    VideoEncodeH265QualityLevelPropertiesKHR & operator=( VkVideoEncodeH265QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH265QualityLevelPropertiesKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions so the wrapper can be passed directly to the C API.
    operator VkVideoEncodeH265QualityLevelPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265QualityLevelPropertiesKHR *>( this );
    }

    operator VkVideoEncodeH265QualityLevelPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265QualityLevelPropertiesKHR *>( this );
    }

    operator VkVideoEncodeH265QualityLevelPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH265QualityLevelPropertiesKHR *>( this );
    }

    operator VkVideoEncodeH265QualityLevelPropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH265QualityLevelPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: const references to all members in declaration order.
    std::tuple<StructureType const &,
               void * const &,
               VideoEncodeH265RateControlFlagsKHR const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               VideoEncodeH265QpKHR const &,
               uint32_t const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       preferredRateControlFlags,
                       preferredGopFrameCount,
                       preferredIdrPeriod,
                       preferredConsecutiveBFrameCount,
                       preferredSubLayerCount,
                       preferredConstantQp,
                       preferredMaxL0ReferenceCount,
                       preferredMaxL1ReferenceCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Memberwise comparison; pNext compares by address (shallow).
    auto operator<=>( VideoEncodeH265QualityLevelPropertiesKHR const & ) const = default;
#else
    bool operator==( VideoEncodeH265QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( preferredRateControlFlags == rhs.preferredRateControlFlags ) &&
             ( preferredGopFrameCount == rhs.preferredGopFrameCount ) && ( preferredIdrPeriod == rhs.preferredIdrPeriod ) &&
             ( preferredConsecutiveBFrameCount == rhs.preferredConsecutiveBFrameCount ) && ( preferredSubLayerCount == rhs.preferredSubLayerCount ) &&
             ( preferredConstantQp == rhs.preferredConstantQp ) && ( preferredMaxL0ReferenceCount == rhs.preferredMaxL0ReferenceCount ) &&
             ( preferredMaxL1ReferenceCount == rhs.preferredMaxL1ReferenceCount );
#  endif
    }

    bool operator!=( VideoEncodeH265QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType must keep this value for the C API to recognize the structure.
    StructureType                      sType                           = StructureType::eVideoEncodeH265QualityLevelPropertiesKHR;
    void *                             pNext                           = {};
    VideoEncodeH265RateControlFlagsKHR preferredRateControlFlags       = {};
    uint32_t                           preferredGopFrameCount          = {};
    uint32_t                           preferredIdrPeriod              = {};
    uint32_t                           preferredConsecutiveBFrameCount = {};
    uint32_t                           preferredSubLayerCount          = {};
    VideoEncodeH265QpKHR               preferredConstantQp             = {};
    uint32_t                           preferredMaxL0ReferenceCount    = {};
    uint32_t                           preferredMaxL1ReferenceCount    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (only declared for C++20 and newer).
  template <>
  struct CppType<VkVideoEncodeH265QualityLevelPropertiesKHR>
  {
    using Type = VideoEncodeH265QualityLevelPropertiesKHR;
  };
#endif

  // Trait mapping the StructureType enumerant to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH265QualityLevelPropertiesKHR>
  {
    using Type = VideoEncodeH265QualityLevelPropertiesKHR;
  };

  // wrapper struct for struct VkVideoEncodeH265QuantizationMapCapabilitiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265QuantizationMapCapabilitiesKHR.html
  struct VideoEncodeH265QuantizationMapCapabilitiesKHR
  {
    // Equivalent C struct; this wrapper is reinterpret_cast to/from it, so layouts must match.
    using NativeType = VkVideoEncodeH265QuantizationMapCapabilitiesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH265QuantizationMapCapabilitiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // No setters are generated for this struct (note the non-const pNext) — presumably an
    // implementation-filled capabilities query output; confirm against the spec page.
    VULKAN_HPP_CONSTEXPR
      VideoEncodeH265QuantizationMapCapabilitiesKHR( int32_t minQpDelta_ = {}, int32_t maxQpDelta_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , minQpDelta{ minQpDelta_ }
      , maxQpDelta{ maxQpDelta_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      VideoEncodeH265QuantizationMapCapabilitiesKHR( VideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct via layout punning.
    VideoEncodeH265QuantizationMapCapabilitiesKHR( VkVideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH265QuantizationMapCapabilitiesKHR( *reinterpret_cast<VideoEncodeH265QuantizationMapCapabilitiesKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH265QuantizationMapCapabilitiesKHR & operator=( VideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via layout punning.
    VideoEncodeH265QuantizationMapCapabilitiesKHR & operator=( VkVideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH265QuantizationMapCapabilitiesKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions so the wrapper can be passed directly to the C API.
    operator VkVideoEncodeH265QuantizationMapCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265QuantizationMapCapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeH265QuantizationMapCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265QuantizationMapCapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeH265QuantizationMapCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH265QuantizationMapCapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeH265QuantizationMapCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH265QuantizationMapCapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: const references to all members in declaration order.
    std::tuple<StructureType const &, void * const &, int32_t const &, int32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, minQpDelta, maxQpDelta );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Memberwise comparison; pNext compares by address (shallow).
    auto operator<=>( VideoEncodeH265QuantizationMapCapabilitiesKHR const & ) const = default;
#else
    bool operator==( VideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minQpDelta == rhs.minQpDelta ) && ( maxQpDelta == rhs.maxQpDelta );
#  endif
    }

    bool operator!=( VideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType must keep this value for the C API to recognize the structure.
    StructureType sType      = StructureType::eVideoEncodeH265QuantizationMapCapabilitiesKHR;
    void *        pNext      = {};
    int32_t       minQpDelta = {};
    int32_t       maxQpDelta = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (only declared for C++20 and newer).
  template <>
  struct CppType<VkVideoEncodeH265QuantizationMapCapabilitiesKHR>
  {
    using Type = VideoEncodeH265QuantizationMapCapabilitiesKHR;
  };
#endif

  // Trait mapping the StructureType enumerant to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH265QuantizationMapCapabilitiesKHR>
  {
    using Type = VideoEncodeH265QuantizationMapCapabilitiesKHR;
  };

  // wrapper struct for struct VkVideoEncodeH265RateControlInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265RateControlInfoKHR.html
  struct VideoEncodeH265RateControlInfoKHR
  {
    // Equivalent C struct; this wrapper is reinterpret_cast to/from it, so layouts must match.
    using NativeType = VkVideoEncodeH265RateControlInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH265RateControlInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // All members value-initialized by default; sType is fixed by its member initializer.
    VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlInfoKHR( VideoEncodeH265RateControlFlagsKHR flags_                  = {},
                                                            uint32_t                           gopFrameCount_          = {},
                                                            uint32_t                           idrPeriod_              = {},
                                                            uint32_t                           consecutiveBFrameCount_ = {},
                                                            uint32_t                           subLayerCount_          = {},
                                                            const void *                       pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , gopFrameCount{ gopFrameCount_ }
      , idrPeriod{ idrPeriod_ }
      , consecutiveBFrameCount{ consecutiveBFrameCount_ }
      , subLayerCount{ subLayerCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlInfoKHR( VideoEncodeH265RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct via layout punning.
    VideoEncodeH265RateControlInfoKHR( VkVideoEncodeH265RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH265RateControlInfoKHR( *reinterpret_cast<VideoEncodeH265RateControlInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH265RateControlInfoKHR & operator=( VideoEncodeH265RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via layout punning.
    VideoEncodeH265RateControlInfoKHR & operator=( VkVideoEncodeH265RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH265RateControlInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setFlags( VideoEncodeH265RateControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT
    {
      gopFrameCount = gopFrameCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setIdrPeriod( uint32_t idrPeriod_ ) VULKAN_HPP_NOEXCEPT
    {
      idrPeriod = idrPeriod_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setConsecutiveBFrameCount( uint32_t consecutiveBFrameCount_ ) VULKAN_HPP_NOEXCEPT
    {
      consecutiveBFrameCount = consecutiveBFrameCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setSubLayerCount( uint32_t subLayerCount_ ) VULKAN_HPP_NOEXCEPT
    {
      subLayerCount = subLayerCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions so the wrapper can be passed directly to the C API.
    operator VkVideoEncodeH265RateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265RateControlInfoKHR *>( this );
    }

    operator VkVideoEncodeH265RateControlInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265RateControlInfoKHR *>( this );
    }

    operator VkVideoEncodeH265RateControlInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH265RateControlInfoKHR *>( this );
    }

    operator VkVideoEncodeH265RateControlInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH265RateControlInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: const references to all members in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               VideoEncodeH265RateControlFlagsKHR const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, gopFrameCount, idrPeriod, consecutiveBFrameCount, subLayerCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Memberwise comparison; pNext compares by address (shallow).
    auto operator<=>( VideoEncodeH265RateControlInfoKHR const & ) const = default;
#else
    bool operator==( VideoEncodeH265RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( gopFrameCount == rhs.gopFrameCount ) &&
             ( idrPeriod == rhs.idrPeriod ) && ( consecutiveBFrameCount == rhs.consecutiveBFrameCount ) && ( subLayerCount == rhs.subLayerCount );
#  endif
    }

    bool operator!=( VideoEncodeH265RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // sType must keep this value for the C API to recognize the structure.
    StructureType                      sType                  = StructureType::eVideoEncodeH265RateControlInfoKHR;
    const void *                       pNext                  = {};
    VideoEncodeH265RateControlFlagsKHR flags                  = {};
    uint32_t                           gopFrameCount          = {};
    uint32_t                           idrPeriod              = {};
    uint32_t                           consecutiveBFrameCount = {};
    uint32_t                           subLayerCount          = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C type to its C++ wrapper (only declared for C++20 and newer).
  template <>
  struct CppType<VkVideoEncodeH265RateControlInfoKHR>
  {
    using Type = VideoEncodeH265RateControlInfoKHR;
  };
#endif

  // Trait mapping the StructureType enumerant to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH265RateControlInfoKHR>
  {
    using Type = VideoEncodeH265RateControlInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeH265RateControlLayerInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265RateControlLayerInfoKHR.html
  struct VideoEncodeH265RateControlLayerInfoKHR
  {
    using NativeType = VkVideoEncodeH265RateControlLayerInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH265RateControlLayerInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlLayerInfoKHR( Bool32                      useMinQp_        = {},
                                                                 VideoEncodeH265QpKHR        minQp_           = {},
                                                                 Bool32                      useMaxQp_        = {},
                                                                 VideoEncodeH265QpKHR        maxQp_           = {},
                                                                 Bool32                      useMaxFrameSize_ = {},
                                                                 VideoEncodeH265FrameSizeKHR maxFrameSize_    = {},
                                                                 const void *                pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , useMinQp{ useMinQp_ }
      , minQp{ minQp_ }
      , useMaxQp{ useMaxQp_ }
      , maxQp{ maxQp_ }
      , useMaxFrameSize{ useMaxFrameSize_ }
      , maxFrameSize{ maxFrameSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlLayerInfoKHR( VideoEncodeH265RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265RateControlLayerInfoKHR( VkVideoEncodeH265RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH265RateControlLayerInfoKHR( *reinterpret_cast<VideoEncodeH265RateControlLayerInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH265RateControlLayerInfoKHR & operator=( VideoEncodeH265RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    VideoEncodeH265RateControlLayerInfoKHR & operator=( VkVideoEncodeH265RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH265RateControlLayerInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setUseMinQp( Bool32 useMinQp_ ) VULKAN_HPP_NOEXCEPT
    {
      useMinQp = useMinQp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setMinQp( VideoEncodeH265QpKHR const & minQp_ ) VULKAN_HPP_NOEXCEPT
    {
      minQp = minQp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setUseMaxQp( Bool32 useMaxQp_ ) VULKAN_HPP_NOEXCEPT
    {
      useMaxQp = useMaxQp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setMaxQp( VideoEncodeH265QpKHR const & maxQp_ ) VULKAN_HPP_NOEXCEPT
    {
      maxQp = maxQp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setUseMaxFrameSize( Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT
    {
      useMaxFrameSize = useMaxFrameSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setMaxFrameSize( VideoEncodeH265FrameSizeKHR const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT
    {
      maxFrameSize = maxFrameSize_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkVideoEncodeH265RateControlLayerInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265RateControlLayerInfoKHR *>( this );
    }

    operator VkVideoEncodeH265RateControlLayerInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265RateControlLayerInfoKHR *>( this );
    }

    operator VkVideoEncodeH265RateControlLayerInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH265RateControlLayerInfoKHR *>( this );
    }

    operator VkVideoEncodeH265RateControlLayerInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH265RateControlLayerInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &,
               const void * const &,
               Bool32 const &,
               VideoEncodeH265QpKHR const &,
               Bool32 const &,
               VideoEncodeH265QpKHR const &,
               Bool32 const &,
               VideoEncodeH265FrameSizeKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, useMinQp, minQp, useMaxQp, maxQp, useMaxFrameSize, maxFrameSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH265RateControlLayerInfoKHR const & ) const = default;
#else
    bool operator==( VideoEncodeH265RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useMinQp == rhs.useMinQp ) && ( minQp == rhs.minQp ) && ( useMaxQp == rhs.useMaxQp ) &&
             ( maxQp == rhs.maxQp ) && ( useMaxFrameSize == rhs.useMaxFrameSize ) && ( maxFrameSize == rhs.maxFrameSize );
#  endif
    }

    bool operator!=( VideoEncodeH265RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType               sType           = StructureType::eVideoEncodeH265RateControlLayerInfoKHR;
    const void *                pNext           = {};
    Bool32                      useMinQp        = {};
    VideoEncodeH265QpKHR        minQp           = {};
    Bool32                      useMaxQp        = {};
    VideoEncodeH265QpKHR        maxQp           = {};
    Bool32                      useMaxFrameSize = {};
    VideoEncodeH265FrameSizeKHR maxFrameSize    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkVideoEncodeH265RateControlLayerInfoKHR to its C++ wrapper (provided for C++20 and newer only).
  template <>
  struct CppType<VkVideoEncodeH265RateControlLayerInfoKHR>
  {
    using Type = VideoEncodeH265RateControlLayerInfoKHR;
  };
#endif

  // Maps the StructureType enum value to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH265RateControlLayerInfoKHR>
  {
    using Type = VideoEncodeH265RateControlLayerInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeH265SessionCreateInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265SessionCreateInfoKHR.html
  struct VideoEncodeH265SessionCreateInfoKHR
  {
    // The C struct this wrapper mirrors; the reinterpret_casts below rely on identical layout.
    using NativeType = VkVideoEncodeH265SessionCreateInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH265SessionCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by the member initializer below, so only the payload is taken.
    VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionCreateInfoKHR( Bool32               useMaxLevelIdc_ = {},
                                                              StdVideoH265LevelIdc maxLevelIdc_    = {},
                                                              const void *         pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , useMaxLevelIdc{ useMaxLevelIdc_ }
      , maxLevelIdc{ maxLevelIdc_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionCreateInfoKHR( VideoEncodeH265SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by bitwise reinterpretation (layout-compatible by generation).
    VideoEncodeH265SessionCreateInfoKHR( VkVideoEncodeH265SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH265SessionCreateInfoKHR( *reinterpret_cast<VideoEncodeH265SessionCreateInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH265SessionCreateInfoKHR & operator=( VideoEncodeH265SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; same layout-compatibility assumption as the converting constructor.
    VideoEncodeH265SessionCreateInfoKHR & operator=( VkVideoEncodeH265SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH265SessionCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoKHR & setUseMaxLevelIdc( Bool32 useMaxLevelIdc_ ) VULKAN_HPP_NOEXCEPT
    {
      useMaxLevelIdc = useMaxLevelIdc_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoKHR & setMaxLevelIdc( StdVideoH265LevelIdc maxLevelIdc_ ) VULKAN_HPP_NOEXCEPT
    {
      maxLevelIdc = maxLevelIdc_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, so the wrapper can be passed directly to the C API.
    operator VkVideoEncodeH265SessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265SessionCreateInfoKHR *>( this );
    }

    operator VkVideoEncodeH265SessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265SessionCreateInfoKHR *>( this );
    }

    operator VkVideoEncodeH265SessionCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH265SessionCreateInfoKHR *>( this );
    }

    operator VkVideoEncodeH265SessionCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH265SessionCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for reflection-style iteration.
    std::tuple<StructureType const &, const void * const &, Bool32 const &, StdVideoH265LevelIdc const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, useMaxLevelIdc, maxLevelIdc );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Manual three-way comparison: maxLevelIdc is an external video-std type (presumably without
    // comparison operators), so it is compared bytewise via memcmp. The resulting order is
    // deterministic but based on the object representation, not on semantic level values.
    std::strong_ordering operator<=>( VideoEncodeH265SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = useMaxLevelIdc <=> rhs.useMaxLevelIdc; cmp != 0 )
        return cmp;
      if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#endif

    // Equality likewise falls back to memcmp for the external maxLevelIdc member.
    bool operator==( VideoEncodeH265SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useMaxLevelIdc == rhs.useMaxLevelIdc ) &&
             ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ) == 0 );
    }

    bool operator!=( VideoEncodeH265SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    StructureType        sType          = StructureType::eVideoEncodeH265SessionCreateInfoKHR;
    const void *         pNext          = {};
    Bool32               useMaxLevelIdc = {};
    StdVideoH265LevelIdc maxLevelIdc    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkVideoEncodeH265SessionCreateInfoKHR to its C++ wrapper (provided for C++20 and newer only).
  template <>
  struct CppType<VkVideoEncodeH265SessionCreateInfoKHR>
  {
    using Type = VideoEncodeH265SessionCreateInfoKHR;
  };
#endif

  // Maps the StructureType enum value to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH265SessionCreateInfoKHR>
  {
    using Type = VideoEncodeH265SessionCreateInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeH265SessionParametersAddInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265SessionParametersAddInfoKHR.html
  struct VideoEncodeH265SessionParametersAddInfoKHR
  {
    // The C struct this wrapper mirrors; the reinterpret_casts below rely on identical layout.
    using NativeType = VkVideoEncodeH265SessionParametersAddInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH265SessionParametersAddInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor taking the three (count, pointer) pairs for VPS/SPS/PPS arrays verbatim.
    VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersAddInfoKHR( uint32_t                                 stdVPSCount_ = {},
                                                                     const StdVideoH265VideoParameterSet *    pStdVPSs_    = {},
                                                                     uint32_t                                 stdSPSCount_ = {},
                                                                     const StdVideoH265SequenceParameterSet * pStdSPSs_    = {},
                                                                     uint32_t                                 stdPPSCount_ = {},
                                                                     const StdVideoH265PictureParameterSet *  pStdPPSs_    = {},
                                                                     const void *                             pNext_       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , stdVPSCount{ stdVPSCount_ }
      , pStdVPSs{ pStdVPSs_ }
      , stdSPSCount{ stdSPSCount_ }
      , pStdSPSs{ pStdSPSs_ }
      , stdPPSCount{ stdPPSCount_ }
      , pStdPPSs{ pStdPPSs_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersAddInfoKHR( VideoEncodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by bitwise reinterpretation (layout-compatible by generation).
    VideoEncodeH265SessionParametersAddInfoKHR( VkVideoEncodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH265SessionParametersAddInfoKHR( *reinterpret_cast<VideoEncodeH265SessionParametersAddInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: derives each count/pointer pair from an ArrayProxy, so the two
    // cannot get out of sync. ArrayProxyNoTemporaries rejects temporaries, since this struct
    // only stores pointers and does not own the parameter-set data.
    VideoEncodeH265SessionParametersAddInfoKHR( ArrayProxyNoTemporaries<const StdVideoH265VideoParameterSet> const &    stdVPSs_,
                                                ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & stdSPSs_ = {},
                                                ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const &  stdPPSs_ = {},
                                                const void *                                                            pNext_   = nullptr )
      : pNext( pNext_ )
      , stdVPSCount( static_cast<uint32_t>( stdVPSs_.size() ) )
      , pStdVPSs( stdVPSs_.data() )
      , stdSPSCount( static_cast<uint32_t>( stdSPSs_.size() ) )
      , pStdSPSs( stdSPSs_.data() )
      , stdPPSCount( static_cast<uint32_t>( stdPPSs_.size() ) )
      , pStdPPSs( stdPPSs_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VideoEncodeH265SessionParametersAddInfoKHR & operator=( VideoEncodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; same layout-compatibility assumption as the converting constructor.
    VideoEncodeH265SessionParametersAddInfoKHR & operator=( VkVideoEncodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH265SessionParametersAddInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters. The setStd*s( ArrayProxy ) overloads update count and pointer together;
    // the raw setters change only their single field.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setStdVPSCount( uint32_t stdVPSCount_ ) VULKAN_HPP_NOEXCEPT
    {
      stdVPSCount = stdVPSCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setPStdVPSs( const StdVideoH265VideoParameterSet * pStdVPSs_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdVPSs = pStdVPSs_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH265SessionParametersAddInfoKHR & setStdVPSs( ArrayProxyNoTemporaries<const StdVideoH265VideoParameterSet> const & stdVPSs_ ) VULKAN_HPP_NOEXCEPT
    {
      stdVPSCount = static_cast<uint32_t>( stdVPSs_.size() );
      pStdVPSs    = stdVPSs_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT
    {
      stdSPSCount = stdSPSCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setPStdSPSs( const StdVideoH265SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdSPSs = pStdSPSs_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH265SessionParametersAddInfoKHR &
      setStdSPSs( ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT
    {
      stdSPSCount = static_cast<uint32_t>( stdSPSs_.size() );
      pStdSPSs    = stdSPSs_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT
    {
      stdPPSCount = stdPPSCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setPStdPPSs( const StdVideoH265PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdPPSs = pStdPPSs_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH265SessionParametersAddInfoKHR &
      setStdPPSs( ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT
    {
      stdPPSCount = static_cast<uint32_t>( stdPPSs_.size() );
      pStdPPSs    = stdPPSs_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, so the wrapper can be passed directly to the C API.
    operator VkVideoEncodeH265SessionParametersAddInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265SessionParametersAddInfoKHR *>( this );
    }

    operator VkVideoEncodeH265SessionParametersAddInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265SessionParametersAddInfoKHR *>( this );
    }

    operator VkVideoEncodeH265SessionParametersAddInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH265SessionParametersAddInfoKHR *>( this );
    }

    operator VkVideoEncodeH265SessionParametersAddInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH265SessionParametersAddInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for reflection-style iteration.
    std::tuple<StructureType const &,
               const void * const &,
               uint32_t const &,
               const StdVideoH265VideoParameterSet * const &,
               uint32_t const &,
               const StdVideoH265SequenceParameterSet * const &,
               uint32_t const &,
               const StdVideoH265PictureParameterSet * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stdVPSCount, pStdVPSs, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Memberwise comparison; note that pointer members compare by address, not by pointee contents.
    auto operator<=>( VideoEncodeH265SessionParametersAddInfoKHR const & ) const = default;
#else
    bool operator==( VideoEncodeH265SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stdVPSCount == rhs.stdVPSCount ) && ( pStdVPSs == rhs.pStdVPSs ) &&
             ( stdSPSCount == rhs.stdSPSCount ) && ( pStdSPSs == rhs.pStdSPSs ) && ( stdPPSCount == rhs.stdPPSCount ) && ( pStdPPSs == rhs.pStdPPSs );
#  endif
    }

    bool operator!=( VideoEncodeH265SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                            sType       = StructureType::eVideoEncodeH265SessionParametersAddInfoKHR;
    const void *                             pNext       = {};
    uint32_t                                 stdVPSCount = {};
    const StdVideoH265VideoParameterSet *    pStdVPSs    = {};
    uint32_t                                 stdSPSCount = {};
    const StdVideoH265SequenceParameterSet * pStdSPSs    = {};
    uint32_t                                 stdPPSCount = {};
    const StdVideoH265PictureParameterSet *  pStdPPSs    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkVideoEncodeH265SessionParametersAddInfoKHR to its C++ wrapper (provided for C++20 and newer only).
  template <>
  struct CppType<VkVideoEncodeH265SessionParametersAddInfoKHR>
  {
    using Type = VideoEncodeH265SessionParametersAddInfoKHR;
  };
#endif

  // Maps the StructureType enum value to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH265SessionParametersAddInfoKHR>
  {
    using Type = VideoEncodeH265SessionParametersAddInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeH265SessionParametersCreateInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265SessionParametersCreateInfoKHR.html
  struct VideoEncodeH265SessionParametersCreateInfoKHR
  {
    // The C struct this wrapper mirrors; the reinterpret_casts below rely on identical layout.
    using NativeType = VkVideoEncodeH265SessionParametersCreateInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH265SessionParametersCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by the member initializer below, so only the payload is taken.
    VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersCreateInfoKHR( uint32_t                                           maxStdVPSCount_     = {},
                                                                        uint32_t                                           maxStdSPSCount_     = {},
                                                                        uint32_t                                           maxStdPPSCount_     = {},
                                                                        const VideoEncodeH265SessionParametersAddInfoKHR * pParametersAddInfo_ = {},
                                                                        const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxStdVPSCount{ maxStdVPSCount_ }
      , maxStdSPSCount{ maxStdSPSCount_ }
      , maxStdPPSCount{ maxStdPPSCount_ }
      , pParametersAddInfo{ pParametersAddInfo_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      VideoEncodeH265SessionParametersCreateInfoKHR( VideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by bitwise reinterpretation (layout-compatible by generation).
    VideoEncodeH265SessionParametersCreateInfoKHR( VkVideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH265SessionParametersCreateInfoKHR( *reinterpret_cast<VideoEncodeH265SessionParametersCreateInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH265SessionParametersCreateInfoKHR & operator=( VideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; same layout-compatibility assumption as the converting constructor.
    VideoEncodeH265SessionParametersCreateInfoKHR & operator=( VkVideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH265SessionParametersCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls can be fluently composed.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoKHR & setMaxStdVPSCount( uint32_t maxStdVPSCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxStdVPSCount = maxStdVPSCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoKHR & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxStdSPSCount = maxStdSPSCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoKHR & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxStdPPSCount = maxStdPPSCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoKHR &
      setPParametersAddInfo( const VideoEncodeH265SessionParametersAddInfoKHR * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pParametersAddInfo = pParametersAddInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, so the wrapper can be passed directly to the C API.
    operator VkVideoEncodeH265SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265SessionParametersCreateInfoKHR *>( this );
    }

    operator VkVideoEncodeH265SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265SessionParametersCreateInfoKHR *>( this );
    }

    operator VkVideoEncodeH265SessionParametersCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH265SessionParametersCreateInfoKHR *>( this );
    }

    operator VkVideoEncodeH265SessionParametersCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH265SessionParametersCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for reflection-style iteration.
    std::tuple<StructureType const &,
               const void * const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               const VideoEncodeH265SessionParametersAddInfoKHR * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxStdVPSCount, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Memberwise comparison; note that pParametersAddInfo compares by address, not by pointee contents.
    auto operator<=>( VideoEncodeH265SessionParametersCreateInfoKHR const & ) const = default;
#else
    bool operator==( VideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxStdVPSCount == rhs.maxStdVPSCount ) && ( maxStdSPSCount == rhs.maxStdSPSCount ) &&
             ( maxStdPPSCount == rhs.maxStdPPSCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo );
#  endif
    }

    bool operator!=( VideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                                      sType              = StructureType::eVideoEncodeH265SessionParametersCreateInfoKHR;
    const void *                                       pNext              = {};
    uint32_t                                           maxStdVPSCount     = {};
    uint32_t                                           maxStdSPSCount     = {};
    uint32_t                                           maxStdPPSCount     = {};
    const VideoEncodeH265SessionParametersAddInfoKHR * pParametersAddInfo = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkVideoEncodeH265SessionParametersCreateInfoKHR to its C++ wrapper (provided for C++20 and newer only).
  template <>
  struct CppType<VkVideoEncodeH265SessionParametersCreateInfoKHR>
  {
    using Type = VideoEncodeH265SessionParametersCreateInfoKHR;
  };
#endif

  // Maps the StructureType enum value to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH265SessionParametersCreateInfoKHR>
  {
    using Type = VideoEncodeH265SessionParametersCreateInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeH265SessionParametersFeedbackInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265SessionParametersFeedbackInfoKHR.html
  // NOTE: pNext is a non-const void* here and no setters are generated for this struct —
  // presumably it is an output/feedback structure filled in by the implementation (confirm via spec).
  struct VideoEncodeH265SessionParametersFeedbackInfoKHR
  {
    // The C struct this wrapper mirrors; the reinterpret_casts below rely on identical layout.
    using NativeType = VkVideoEncodeH265SessionParametersFeedbackInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH265SessionParametersFeedbackInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by the member initializer below, so only the payload is taken.
    VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersFeedbackInfoKHR( Bool32 hasStdVPSOverrides_ = {},
                                                                          Bool32 hasStdSPSOverrides_ = {},
                                                                          Bool32 hasStdPPSOverrides_ = {},
                                                                          void * pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , hasStdVPSOverrides{ hasStdVPSOverrides_ }
      , hasStdSPSOverrides{ hasStdSPSOverrides_ }
      , hasStdPPSOverrides{ hasStdPPSOverrides_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      VideoEncodeH265SessionParametersFeedbackInfoKHR( VideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by bitwise reinterpretation (layout-compatible by generation).
    VideoEncodeH265SessionParametersFeedbackInfoKHR( VkVideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH265SessionParametersFeedbackInfoKHR( *reinterpret_cast<VideoEncodeH265SessionParametersFeedbackInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH265SessionParametersFeedbackInfoKHR & operator=( VideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct; same layout-compatibility assumption as the converting constructor.
    VideoEncodeH265SessionParametersFeedbackInfoKHR & operator=( VkVideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH265SessionParametersFeedbackInfoKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C struct, so the wrapper can be passed directly to the C API.
    operator VkVideoEncodeH265SessionParametersFeedbackInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265SessionParametersFeedbackInfoKHR *>( this );
    }

    operator VkVideoEncodeH265SessionParametersFeedbackInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265SessionParametersFeedbackInfoKHR *>( this );
    }

    operator VkVideoEncodeH265SessionParametersFeedbackInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH265SessionParametersFeedbackInfoKHR *>( this );
    }

    operator VkVideoEncodeH265SessionParametersFeedbackInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH265SessionParametersFeedbackInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Exposes all members as a tuple of references for reflection-style iteration.
    std::tuple<StructureType const &, void * const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, hasStdVPSOverrides, hasStdSPSOverrides, hasStdPPSOverrides );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Memberwise comparison; note that pNext compares by address, not by pointee contents.
    auto operator<=>( VideoEncodeH265SessionParametersFeedbackInfoKHR const & ) const = default;
#else
    bool operator==( VideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hasStdVPSOverrides == rhs.hasStdVPSOverrides ) &&
             ( hasStdSPSOverrides == rhs.hasStdSPSOverrides ) && ( hasStdPPSOverrides == rhs.hasStdPPSOverrides );
#  endif
    }

    bool operator!=( VideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType              = StructureType::eVideoEncodeH265SessionParametersFeedbackInfoKHR;
    void *        pNext              = {};
    Bool32        hasStdVPSOverrides = {};
    Bool32        hasStdSPSOverrides = {};
    Bool32        hasStdPPSOverrides = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkVideoEncodeH265SessionParametersFeedbackInfoKHR to its C++ wrapper (provided for C++20 and newer only).
  template <>
  struct CppType<VkVideoEncodeH265SessionParametersFeedbackInfoKHR>
  {
    using Type = VideoEncodeH265SessionParametersFeedbackInfoKHR;
  };
#endif

  // Maps the StructureType enum value to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH265SessionParametersFeedbackInfoKHR>
  {
    using Type = VideoEncodeH265SessionParametersFeedbackInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeH265SessionParametersGetInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265SessionParametersGetInfoKHR.html
  struct VideoEncodeH265SessionParametersGetInfoKHR
  {
    using NativeType = VkVideoEncodeH265SessionParametersGetInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeH265SessionParametersGetInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersGetInfoKHR( Bool32       writeStdVPS_ = {},
                                                                     Bool32       writeStdSPS_ = {},
                                                                     Bool32       writeStdPPS_ = {},
                                                                     uint32_t     stdVPSId_    = {},
                                                                     uint32_t     stdSPSId_    = {},
                                                                     uint32_t     stdPPSId_    = {},
                                                                     const void * pNext_       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , writeStdVPS{ writeStdVPS_ }
      , writeStdSPS{ writeStdSPS_ }
      , writeStdPPS{ writeStdPPS_ }
      , stdVPSId{ stdVPSId_ }
      , stdSPSId{ stdSPSId_ }
      , stdPPSId{ stdPPSId_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersGetInfoKHR( VideoEncodeH265SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265SessionParametersGetInfoKHR( VkVideoEncodeH265SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH265SessionParametersGetInfoKHR( *reinterpret_cast<VideoEncodeH265SessionParametersGetInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeH265SessionParametersGetInfoKHR & operator=( VideoEncodeH265SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    VideoEncodeH265SessionParametersGetInfoKHR & operator=( VkVideoEncodeH265SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeH265SessionParametersGetInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setWriteStdVPS( Bool32 writeStdVPS_ ) VULKAN_HPP_NOEXCEPT
    {
      writeStdVPS = writeStdVPS_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setWriteStdSPS( Bool32 writeStdSPS_ ) VULKAN_HPP_NOEXCEPT
    {
      writeStdSPS = writeStdSPS_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setWriteStdPPS( Bool32 writeStdPPS_ ) VULKAN_HPP_NOEXCEPT
    {
      writeStdPPS = writeStdPPS_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setStdVPSId( uint32_t stdVPSId_ ) VULKAN_HPP_NOEXCEPT
    {
      stdVPSId = stdVPSId_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setStdSPSId( uint32_t stdSPSId_ ) VULKAN_HPP_NOEXCEPT
    {
      stdSPSId = stdSPSId_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setStdPPSId( uint32_t stdPPSId_ ) VULKAN_HPP_NOEXCEPT
    {
      stdPPSId = stdPPSId_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    operator VkVideoEncodeH265SessionParametersGetInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265SessionParametersGetInfoKHR *>( this );
    }

    operator VkVideoEncodeH265SessionParametersGetInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265SessionParametersGetInfoKHR *>( this );
    }

    operator VkVideoEncodeH265SessionParametersGetInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeH265SessionParametersGetInfoKHR *>( this );
    }

    operator VkVideoEncodeH265SessionParametersGetInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeH265SessionParametersGetInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::
      tuple<StructureType const &, const void * const &, Bool32 const &, Bool32 const &, Bool32 const &, uint32_t const &, uint32_t const &, uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, writeStdVPS, writeStdSPS, writeStdPPS, stdVPSId, stdSPSId, stdPPSId );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH265SessionParametersGetInfoKHR const & ) const = default;
#else
    bool operator==( VideoEncodeH265SessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( writeStdVPS == rhs.writeStdVPS ) && ( writeStdSPS == rhs.writeStdSPS ) &&
             ( writeStdPPS == rhs.writeStdPPS ) && ( stdVPSId == rhs.stdVPSId ) && ( stdSPSId == rhs.stdSPSId ) && ( stdPPSId == rhs.stdPPSId );
#  endif
    }

    bool operator!=( VideoEncodeH265SessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType       = StructureType::eVideoEncodeH265SessionParametersGetInfoKHR;
    const void *  pNext       = {};
    Bool32        writeStdVPS = {};
    Bool32        writeStdSPS = {};
    Bool32        writeStdPPS = {};
    uint32_t      stdVPSId    = {};
    uint32_t      stdSPSId    = {};
    uint32_t      stdPPSId    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only provided for C++20 and newer).
  template <>
  struct CppType<VkVideoEncodeH265SessionParametersGetInfoKHR>
  {
    using Type = VideoEncodeH265SessionParametersGetInfoKHR;
  };
#endif

  // Maps the sType enumerant back to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH265SessionParametersGetInfoKHR>
  {
    using Type = VideoEncodeH265SessionParametersGetInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeInfoKHR.html
  struct VideoEncodeInfoKHR
  {
    using NativeType = VkVideoEncodeInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext is placed last so all payload fields can be supplied positionally.
    VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR( VideoEncodeFlagsKHR               flags_                           = {},
                                             Buffer                            dstBuffer_                       = {},
                                             DeviceSize                        dstBufferOffset_                 = {},
                                             DeviceSize                        dstBufferRange_                  = {},
                                             VideoPictureResourceInfoKHR       srcPictureResource_              = {},
                                             const VideoReferenceSlotInfoKHR * pSetupReferenceSlot_             = {},
                                             uint32_t                          referenceSlotCount_              = {},
                                             const VideoReferenceSlotInfoKHR * pReferenceSlots_                 = {},
                                             uint32_t                          precedingExternallyEncodedBytes_ = {},
                                             const void *                      pNext_                           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , dstBuffer{ dstBuffer_ }
      , dstBufferOffset{ dstBufferOffset_ }
      , dstBufferRange{ dstBufferRange_ }
      , srcPictureResource{ srcPictureResource_ }
      , pSetupReferenceSlot{ pSetupReferenceSlot_ }
      , referenceSlotCount{ referenceSlotCount_ }
      , pReferenceSlots{ pReferenceSlots_ }
      , precedingExternallyEncodedBytes{ precedingExternallyEncodedBytes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR( VideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the reinterpret_cast relies on this wrapper having the same layout as VkVideoEncodeInfoKHR.
    VideoEncodeInfoKHR( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : VideoEncodeInfoKHR( *reinterpret_cast<VideoEncodeInfoKHR const *>( &rhs ) ) {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: referenceSlotCount and pReferenceSlots are derived from the given range.
    VideoEncodeInfoKHR( VideoEncodeFlagsKHR                                              flags_,
                        Buffer                                                           dstBuffer_,
                        DeviceSize                                                       dstBufferOffset_,
                        DeviceSize                                                       dstBufferRange_,
                        VideoPictureResourceInfoKHR                                      srcPictureResource_,
                        const VideoReferenceSlotInfoKHR *                                pSetupReferenceSlot_,
                        ArrayProxyNoTemporaries<const VideoReferenceSlotInfoKHR> const & referenceSlots_,
                        uint32_t                                                         precedingExternallyEncodedBytes_ = {},
                        const void *                                                     pNext_                           = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , dstBuffer( dstBuffer_ )
      , dstBufferOffset( dstBufferOffset_ )
      , dstBufferRange( dstBufferRange_ )
      , srcPictureResource( srcPictureResource_ )
      , pSetupReferenceSlot( pSetupReferenceSlot_ )
      , referenceSlotCount( static_cast<uint32_t>( referenceSlots_.size() ) )
      , pReferenceSlots( referenceSlots_.data() )
      , precedingExternallyEncodedBytes( precedingExternallyEncodedBytes_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VideoEncodeInfoKHR & operator=( VideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    VideoEncodeInfoKHR & operator=( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member; each returns *this for fluent use.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setFlags( VideoEncodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBuffer( Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      dstBuffer = dstBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBufferOffset( DeviceSize dstBufferOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      dstBufferOffset = dstBufferOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBufferRange( DeviceSize dstBufferRange_ ) VULKAN_HPP_NOEXCEPT
    {
      dstBufferRange = dstBufferRange_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setSrcPictureResource( VideoPictureResourceInfoKHR const & srcPictureResource_ ) VULKAN_HPP_NOEXCEPT
    {
      srcPictureResource = srcPictureResource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPSetupReferenceSlot( const VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT
    {
      pSetupReferenceSlot = pSetupReferenceSlot_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT
    {
      referenceSlotCount = referenceSlotCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPReferenceSlots( const VideoReferenceSlotInfoKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT
    {
      pReferenceSlots = pReferenceSlots_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets count and pointer together from a single range, keeping the two members consistent.
    VideoEncodeInfoKHR & setReferenceSlots( ArrayProxyNoTemporaries<const VideoReferenceSlotInfoKHR> const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT
    {
      referenceSlotCount = static_cast<uint32_t>( referenceSlots_.size() );
      pReferenceSlots    = referenceSlots_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPrecedingExternallyEncodedBytes( uint32_t precedingExternallyEncodedBytes_ ) VULKAN_HPP_NOEXCEPT
    {
      precedingExternallyEncodedBytes = precedingExternallyEncodedBytes_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (by reference and by pointer);
    // all four casts assume identical memory layout between wrapper and VkVideoEncodeInfoKHR.
    operator VkVideoEncodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeInfoKHR *>( this );
    }

    operator VkVideoEncodeInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeInfoKHR *>( this );
    }

    operator VkVideoEncodeInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeInfoKHR *>( this );
    }

    operator VkVideoEncodeInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Member-wise tuple of const references; operator== compares these tuples when reflection is enabled.
    std::tuple<StructureType const &,
               const void * const &,
               VideoEncodeFlagsKHR const &,
               Buffer const &,
               DeviceSize const &,
               DeviceSize const &,
               VideoPictureResourceInfoKHR const &,
               const VideoReferenceSlotInfoKHR * const &,
               uint32_t const &,
               const VideoReferenceSlotInfoKHR * const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       flags,
                       dstBuffer,
                       dstBufferOffset,
                       dstBufferRange,
                       srcPictureResource,
                       pSetupReferenceSlot,
                       referenceSlotCount,
                       pReferenceSlots,
                       precedingExternallyEncodedBytes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeInfoKHR const & ) const = default;
#else
    bool operator==( VideoEncodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      // Note: pointer members (pNext, pSetupReferenceSlot, pReferenceSlots) are compared by address, not by pointee contents.
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dstBuffer == rhs.dstBuffer ) &&
             ( dstBufferOffset == rhs.dstBufferOffset ) && ( dstBufferRange == rhs.dstBufferRange ) && ( srcPictureResource == rhs.srcPictureResource ) &&
             ( pSetupReferenceSlot == rhs.pSetupReferenceSlot ) && ( referenceSlotCount == rhs.referenceSlotCount ) &&
             ( pReferenceSlots == rhs.pReferenceSlots ) && ( precedingExternallyEncodedBytes == rhs.precedingExternallyEncodedBytes );
#  endif
    }

    bool operator!=( VideoEncodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                     sType                           = StructureType::eVideoEncodeInfoKHR;
    const void *                      pNext                           = {};
    VideoEncodeFlagsKHR               flags                           = {};
    Buffer                            dstBuffer                       = {};
    DeviceSize                        dstBufferOffset                 = {};
    DeviceSize                        dstBufferRange                  = {};
    VideoPictureResourceInfoKHR       srcPictureResource              = {};
    const VideoReferenceSlotInfoKHR * pSetupReferenceSlot             = {};
    uint32_t                          referenceSlotCount              = {};
    const VideoReferenceSlotInfoKHR * pReferenceSlots                 = {};
    uint32_t                          precedingExternallyEncodedBytes = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only provided for C++20 and newer).
  template <>
  struct CppType<VkVideoEncodeInfoKHR>
  {
    using Type = VideoEncodeInfoKHR;
  };
#endif

  // Maps the sType enumerant back to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeInfoKHR>
  {
    using Type = VideoEncodeInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeIntraRefreshCapabilitiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeIntraRefreshCapabilitiesKHR.html
  struct VideoEncodeIntraRefreshCapabilitiesKHR
  {
    using NativeType = VkVideoEncodeIntraRefreshCapabilitiesKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeIntraRefreshCapabilitiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor. pNext is a non-const void* and no setters are generated below,
    // which is consistent with a capabilities structure written by the implementation — confirm against the spec.
    VULKAN_HPP_CONSTEXPR VideoEncodeIntraRefreshCapabilitiesKHR( VideoEncodeIntraRefreshModeFlagsKHR intraRefreshModes_                       = {},
                                                                 uint32_t                            maxIntraRefreshCycleDuration_            = {},
                                                                 uint32_t                            maxIntraRefreshActiveReferencePictures_  = {},
                                                                 Bool32                              partitionIndependentIntraRefreshRegions_ = {},
                                                                 Bool32                              nonRectangularIntraRefreshRegions_       = {},
                                                                 void *                              pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , intraRefreshModes{ intraRefreshModes_ }
      , maxIntraRefreshCycleDuration{ maxIntraRefreshCycleDuration_ }
      , maxIntraRefreshActiveReferencePictures{ maxIntraRefreshActiveReferencePictures_ }
      , partitionIndependentIntraRefreshRegions{ partitionIndependentIntraRefreshRegions_ }
      , nonRectangularIntraRefreshRegions{ nonRectangularIntraRefreshRegions_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeIntraRefreshCapabilitiesKHR( VideoEncodeIntraRefreshCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on identical layout between wrapper and native type.
    VideoEncodeIntraRefreshCapabilitiesKHR( VkVideoEncodeIntraRefreshCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeIntraRefreshCapabilitiesKHR( *reinterpret_cast<VideoEncodeIntraRefreshCapabilitiesKHR const *>( &rhs ) )
    {
    }

    VideoEncodeIntraRefreshCapabilitiesKHR & operator=( VideoEncodeIntraRefreshCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    VideoEncodeIntraRefreshCapabilitiesKHR & operator=( VkVideoEncodeIntraRefreshCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeIntraRefreshCapabilitiesKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native type (by reference and by pointer).
    operator VkVideoEncodeIntraRefreshCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeIntraRefreshCapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeIntraRefreshCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeIntraRefreshCapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeIntraRefreshCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeIntraRefreshCapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeIntraRefreshCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeIntraRefreshCapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Member-wise tuple of const references; operator== compares these tuples when reflection is enabled.
    std::tuple<StructureType const &,
               void * const &,
               VideoEncodeIntraRefreshModeFlagsKHR const &,
               uint32_t const &,
               uint32_t const &,
               Bool32 const &,
               Bool32 const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       intraRefreshModes,
                       maxIntraRefreshCycleDuration,
                       maxIntraRefreshActiveReferencePictures,
                       partitionIndependentIntraRefreshRegions,
                       nonRectangularIntraRefreshRegions );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeIntraRefreshCapabilitiesKHR const & ) const = default;
#else
    bool operator==( VideoEncodeIntraRefreshCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      // Note: pNext is compared by address, not by chain contents.
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( intraRefreshModes == rhs.intraRefreshModes ) &&
             ( maxIntraRefreshCycleDuration == rhs.maxIntraRefreshCycleDuration ) &&
             ( maxIntraRefreshActiveReferencePictures == rhs.maxIntraRefreshActiveReferencePictures ) &&
             ( partitionIndependentIntraRefreshRegions == rhs.partitionIndependentIntraRefreshRegions ) &&
             ( nonRectangularIntraRefreshRegions == rhs.nonRectangularIntraRefreshRegions );
#  endif
    }

    bool operator!=( VideoEncodeIntraRefreshCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                       sType                                   = StructureType::eVideoEncodeIntraRefreshCapabilitiesKHR;
    void *                              pNext                                   = {};
    VideoEncodeIntraRefreshModeFlagsKHR intraRefreshModes                       = {};
    uint32_t                            maxIntraRefreshCycleDuration            = {};
    uint32_t                            maxIntraRefreshActiveReferencePictures  = {};
    Bool32                              partitionIndependentIntraRefreshRegions = {};
    Bool32                              nonRectangularIntraRefreshRegions       = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only provided for C++20 and newer).
  template <>
  struct CppType<VkVideoEncodeIntraRefreshCapabilitiesKHR>
  {
    using Type = VideoEncodeIntraRefreshCapabilitiesKHR;
  };
#endif

  // Maps the sType enumerant back to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeIntraRefreshCapabilitiesKHR>
  {
    using Type = VideoEncodeIntraRefreshCapabilitiesKHR;
  };

  // wrapper struct for struct VkVideoEncodeIntraRefreshInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeIntraRefreshInfoKHR.html
  struct VideoEncodeIntraRefreshInfoKHR
  {
    using NativeType = VkVideoEncodeIntraRefreshInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeIntraRefreshInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; pNext is placed last so the payload fields can be supplied positionally.
    VULKAN_HPP_CONSTEXPR VideoEncodeIntraRefreshInfoKHR( uint32_t     intraRefreshCycleDuration_ = {},
                                                         uint32_t     intraRefreshIndex_         = {},
                                                         const void * pNext_                     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , intraRefreshCycleDuration{ intraRefreshCycleDuration_ }
      , intraRefreshIndex{ intraRefreshIndex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeIntraRefreshInfoKHR( VideoEncodeIntraRefreshInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on identical layout between wrapper and native type.
    VideoEncodeIntraRefreshInfoKHR( VkVideoEncodeIntraRefreshInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeIntraRefreshInfoKHR( *reinterpret_cast<VideoEncodeIntraRefreshInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeIntraRefreshInfoKHR & operator=( VideoEncodeIntraRefreshInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    VideoEncodeIntraRefreshInfoKHR & operator=( VkVideoEncodeIntraRefreshInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeIntraRefreshInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member; each returns *this for fluent use.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeIntraRefreshInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeIntraRefreshInfoKHR & setIntraRefreshCycleDuration( uint32_t intraRefreshCycleDuration_ ) VULKAN_HPP_NOEXCEPT
    {
      intraRefreshCycleDuration = intraRefreshCycleDuration_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeIntraRefreshInfoKHR & setIntraRefreshIndex( uint32_t intraRefreshIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      intraRefreshIndex = intraRefreshIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native type (by reference and by pointer).
    operator VkVideoEncodeIntraRefreshInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeIntraRefreshInfoKHR *>( this );
    }

    operator VkVideoEncodeIntraRefreshInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeIntraRefreshInfoKHR *>( this );
    }

    operator VkVideoEncodeIntraRefreshInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeIntraRefreshInfoKHR *>( this );
    }

    operator VkVideoEncodeIntraRefreshInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeIntraRefreshInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Member-wise tuple of const references; operator== compares these tuples when reflection is enabled.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, intraRefreshCycleDuration, intraRefreshIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeIntraRefreshInfoKHR const & ) const = default;
#else
    bool operator==( VideoEncodeIntraRefreshInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      // Note: pNext is compared by address, not by chain contents.
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( intraRefreshCycleDuration == rhs.intraRefreshCycleDuration ) &&
             ( intraRefreshIndex == rhs.intraRefreshIndex );
#  endif
    }

    bool operator!=( VideoEncodeIntraRefreshInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                     = StructureType::eVideoEncodeIntraRefreshInfoKHR;
    const void *  pNext                     = {};
    uint32_t      intraRefreshCycleDuration = {};
    uint32_t      intraRefreshIndex         = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only provided for C++20 and newer).
  template <>
  struct CppType<VkVideoEncodeIntraRefreshInfoKHR>
  {
    using Type = VideoEncodeIntraRefreshInfoKHR;
  };
#endif

  // Maps the sType enumerant back to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeIntraRefreshInfoKHR>
  {
    using Type = VideoEncodeIntraRefreshInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeProfileRgbConversionInfoVALVE, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeProfileRgbConversionInfoVALVE.html
  struct VideoEncodeProfileRgbConversionInfoVALVE
  {
    using NativeType = VkVideoEncodeProfileRgbConversionInfoVALVE;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeProfileRgbConversionInfoVALVE;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; the conversion flag can be given positionally, pNext last.
    VULKAN_HPP_CONSTEXPR VideoEncodeProfileRgbConversionInfoVALVE( Bool32 performEncodeRgbConversion_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , performEncodeRgbConversion( performEncodeRgbConversion_ )
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeProfileRgbConversionInfoVALVE( VideoEncodeProfileRgbConversionInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpreting it as the layout-compatible wrapper.
    VideoEncodeProfileRgbConversionInfoVALVE( VkVideoEncodeProfileRgbConversionInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeProfileRgbConversionInfoVALVE( *reinterpret_cast<VideoEncodeProfileRgbConversionInfoVALVE const *>( &rhs ) )
    {
    }

    VideoEncodeProfileRgbConversionInfoVALVE & operator=( VideoEncodeProfileRgbConversionInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the defaulted copy assignment.
    VideoEncodeProfileRgbConversionInfoVALVE & operator=( VkVideoEncodeProfileRgbConversionInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      auto const & wrapped = *reinterpret_cast<VideoEncodeProfileRgbConversionInfoVALVE const *>( &rhs );
      *this                = wrapped;
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeProfileRgbConversionInfoVALVE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeProfileRgbConversionInfoVALVE & setPerformEncodeRgbConversion( Bool32 performEncodeRgbConversion_ ) VULKAN_HPP_NOEXCEPT
    {
      this->performEncodeRgbConversion = performEncodeRgbConversion_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the native type, by reference and by pointer.
    operator VkVideoEncodeProfileRgbConversionInfoVALVE const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeProfileRgbConversionInfoVALVE *>( this );
    }

    operator VkVideoEncodeProfileRgbConversionInfoVALVE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeProfileRgbConversionInfoVALVE *>( this );
    }

    operator VkVideoEncodeProfileRgbConversionInfoVALVE const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeProfileRgbConversionInfoVALVE *>( this );
    }

    operator VkVideoEncodeProfileRgbConversionInfoVALVE *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeProfileRgbConversionInfoVALVE *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Member-wise tuple of references, consumed by the reflection-based comparison below.
    std::tuple<StructureType const &, const void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, performEncodeRgbConversion );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeProfileRgbConversionInfoVALVE const & ) const = default;
#else
    bool operator==( VideoEncodeProfileRgbConversionInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      // pNext is compared by address, not by chain contents.
      if ( sType != rhs.sType )
      {
        return false;
      }
      if ( pNext != rhs.pNext )
      {
        return false;
      }
      return performEncodeRgbConversion == rhs.performEncodeRgbConversion;
#  endif
    }

    bool operator!=( VideoEncodeProfileRgbConversionInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType                      = StructureType::eVideoEncodeProfileRgbConversionInfoVALVE;
    const void *  pNext                      = {};
    Bool32        performEncodeRgbConversion = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only provided for C++20 and newer).
  template <>
  struct CppType<VkVideoEncodeProfileRgbConversionInfoVALVE>
  {
    using Type = VideoEncodeProfileRgbConversionInfoVALVE;
  };
#endif

  // Maps the sType enumerant back to the wrapper struct it identifies.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeProfileRgbConversionInfoVALVE>
  {
    using Type = VideoEncodeProfileRgbConversionInfoVALVE;
  };

  // wrapper struct for struct VkVideoEncodeQualityLevelInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeQualityLevelInfoKHR.html
  struct VideoEncodeQualityLevelInfoKHR
  {
    using NativeType = VkVideoEncodeQualityLevelInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeQualityLevelInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; the quality level can be given positionally, pNext last.
    VULKAN_HPP_CONSTEXPR VideoEncodeQualityLevelInfoKHR( uint32_t qualityLevel_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , qualityLevel( qualityLevel_ )
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeQualityLevelInfoKHR( VideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct by reinterpreting it as the layout-compatible wrapper.
    VideoEncodeQualityLevelInfoKHR( VkVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeQualityLevelInfoKHR( *reinterpret_cast<VideoEncodeQualityLevelInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeQualityLevelInfoKHR & operator=( VideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the defaulted copy assignment.
    VideoEncodeQualityLevelInfoKHR & operator=( VkVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      auto const & wrapped = *reinterpret_cast<VideoEncodeQualityLevelInfoKHR const *>( &rhs );
      *this                = wrapped;
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeQualityLevelInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeQualityLevelInfoKHR & setQualityLevel( uint32_t qualityLevel_ ) VULKAN_HPP_NOEXCEPT
    {
      this->qualityLevel = qualityLevel_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Conversions to the native type, by reference and by pointer.
    operator VkVideoEncodeQualityLevelInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeQualityLevelInfoKHR *>( this );
    }

    operator VkVideoEncodeQualityLevelInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeQualityLevelInfoKHR *>( this );
    }

    operator VkVideoEncodeQualityLevelInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeQualityLevelInfoKHR *>( this );
    }

    operator VkVideoEncodeQualityLevelInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeQualityLevelInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Member-wise tuple of references, consumed by the reflection-based comparison below.
    std::tuple<StructureType const &, const void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, qualityLevel );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeQualityLevelInfoKHR const & ) const = default;
#else
    bool operator==( VideoEncodeQualityLevelInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      // pNext is compared by address, not by chain contents.
      if ( sType != rhs.sType )
      {
        return false;
      }
      if ( pNext != rhs.pNext )
      {
        return false;
      }
      return qualityLevel == rhs.qualityLevel;
#  endif
    }

    bool operator!=( VideoEncodeQualityLevelInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    StructureType sType        = StructureType::eVideoEncodeQualityLevelInfoKHR;
    const void *  pNext        = {};
    uint32_t      qualityLevel = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper, for generic code (e.g. StructureChain) that
  // needs to recover the wrapper type from the C type.
  template <>
  struct CppType<VkVideoEncodeQualityLevelInfoKHR>
  {
    using Type = VideoEncodeQualityLevelInfoKHR;
  };
#endif

  // Maps the StructureType enumerant back to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeQualityLevelInfoKHR>
  {
    using Type = VideoEncodeQualityLevelInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeQualityLevelPropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeQualityLevelPropertiesKHR.html
  struct VideoEncodeQualityLevelPropertiesKHR
  {
    // The layout-compatible native C struct; all reinterpret_cast conversions below rely on this equivalence.
    using NativeType = VkVideoEncodeQualityLevelPropertiesKHR;

    // allowDuplicate == false: at most one instance of this struct may appear in a StructureChain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeQualityLevelPropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      VideoEncodeQualityLevelPropertiesKHR( VideoEncodeRateControlModeFlagBitsKHR preferredRateControlMode_ = VideoEncodeRateControlModeFlagBitsKHR::eDefault,
                                            uint32_t                              preferredRateControlLayerCount_ = {},
                                            void *                                pNext_                          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , preferredRateControlMode{ preferredRateControlMode_ }
      , preferredRateControlLayerCount{ preferredRateControlLayerCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeQualityLevelPropertiesKHR( VideoEncodeQualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; valid because both types share an identical memory layout.
    VideoEncodeQualityLevelPropertiesKHR( VkVideoEncodeQualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeQualityLevelPropertiesKHR( *reinterpret_cast<VideoEncodeQualityLevelPropertiesKHR const *>( &rhs ) )
    {
    }

    VideoEncodeQualityLevelPropertiesKHR & operator=( VideoEncodeQualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (same layout-equivalence argument as the converting constructor).
    VideoEncodeQualityLevelPropertiesKHR & operator=( VkVideoEncodeQualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeQualityLevelPropertiesKHR const *>( &rhs );
      return *this;
    }

    // No setters are generated for this struct; it is filled in by the implementation
    // (note the non-const pNext, typical of output/properties structs).

    // Implicit conversions to the native C struct, so this wrapper can be passed directly to the C API.
    operator VkVideoEncodeQualityLevelPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeQualityLevelPropertiesKHR *>( this );
    }

    operator VkVideoEncodeQualityLevelPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( this );
    }

    operator VkVideoEncodeQualityLevelPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeQualityLevelPropertiesKHR *>( this );
    }

    operator VkVideoEncodeQualityLevelPropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used by the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, VideoEncodeRateControlModeFlagBitsKHR const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, preferredRateControlMode, preferredRateControlLayerCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeQualityLevelPropertiesKHR const & ) const = default;
#else
    // NOTE: pNext is compared as a raw pointer value; chained structs are not compared deeply.
    bool operator==( VideoEncodeQualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( preferredRateControlMode == rhs.preferredRateControlMode ) &&
             ( preferredRateControlLayerCount == rhs.preferredRateControlLayerCount );
#  endif
    }

    bool operator!=( VideoEncodeQualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                         sType                          = StructureType::eVideoEncodeQualityLevelPropertiesKHR;
    void *                                pNext                          = {};
    VideoEncodeRateControlModeFlagBitsKHR preferredRateControlMode       = VideoEncodeRateControlModeFlagBitsKHR::eDefault;
    uint32_t                              preferredRateControlLayerCount = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper, for generic code (e.g. StructureChain) that
  // needs to recover the wrapper type from the C type.
  template <>
  struct CppType<VkVideoEncodeQualityLevelPropertiesKHR>
  {
    using Type = VideoEncodeQualityLevelPropertiesKHR;
  };
#endif

  // Maps the StructureType enumerant back to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeQualityLevelPropertiesKHR>
  {
    using Type = VideoEncodeQualityLevelPropertiesKHR;
  };

  // wrapper struct for struct VkVideoEncodeQuantizationMapCapabilitiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeQuantizationMapCapabilitiesKHR.html
  struct VideoEncodeQuantizationMapCapabilitiesKHR
  {
    // The layout-compatible native C struct; all reinterpret_cast conversions below rely on this equivalence.
    using NativeType = VkVideoEncodeQuantizationMapCapabilitiesKHR;

    // allowDuplicate == false: at most one instance of this struct may appear in a StructureChain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeQuantizationMapCapabilitiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapCapabilitiesKHR( Extent2D maxQuantizationMapExtent_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , maxQuantizationMapExtent{ maxQuantizationMapExtent_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapCapabilitiesKHR( VideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; valid because both types share an identical memory layout.
    VideoEncodeQuantizationMapCapabilitiesKHR( VkVideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeQuantizationMapCapabilitiesKHR( *reinterpret_cast<VideoEncodeQuantizationMapCapabilitiesKHR const *>( &rhs ) )
    {
    }

    VideoEncodeQuantizationMapCapabilitiesKHR & operator=( VideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (same layout-equivalence argument as the converting constructor).
    VideoEncodeQuantizationMapCapabilitiesKHR & operator=( VkVideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeQuantizationMapCapabilitiesKHR const *>( &rhs );
      return *this;
    }

    // No setters are generated for this struct; it is filled in by the implementation
    // (note the non-const pNext, typical of output/capabilities structs).

    // Implicit conversions to the native C struct, so this wrapper can be passed directly to the C API.
    operator VkVideoEncodeQuantizationMapCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeQuantizationMapCapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeQuantizationMapCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeQuantizationMapCapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeQuantizationMapCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeQuantizationMapCapabilitiesKHR *>( this );
    }

    operator VkVideoEncodeQuantizationMapCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeQuantizationMapCapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used by the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, Extent2D const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxQuantizationMapExtent );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeQuantizationMapCapabilitiesKHR const & ) const = default;
#else
    // NOTE: pNext is compared as a raw pointer value; chained structs are not compared deeply.
    bool operator==( VideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxQuantizationMapExtent == rhs.maxQuantizationMapExtent );
#  endif
    }

    bool operator!=( VideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                    = StructureType::eVideoEncodeQuantizationMapCapabilitiesKHR;
    void *        pNext                    = {};
    Extent2D      maxQuantizationMapExtent = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper, for generic code (e.g. StructureChain) that
  // needs to recover the wrapper type from the C type.
  template <>
  struct CppType<VkVideoEncodeQuantizationMapCapabilitiesKHR>
  {
    using Type = VideoEncodeQuantizationMapCapabilitiesKHR;
  };
#endif

  // Maps the StructureType enumerant back to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeQuantizationMapCapabilitiesKHR>
  {
    using Type = VideoEncodeQuantizationMapCapabilitiesKHR;
  };

  // wrapper struct for struct VkVideoEncodeQuantizationMapInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeQuantizationMapInfoKHR.html
  struct VideoEncodeQuantizationMapInfoKHR
  {
    // The layout-compatible native C struct; all reinterpret_cast conversions below rely on this equivalence.
    using NativeType = VkVideoEncodeQuantizationMapInfoKHR;

    // allowDuplicate == false: at most one instance of this struct may appear in a StructureChain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeQuantizationMapInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapInfoKHR( ImageView    quantizationMap_       = {},
                                                            Extent2D     quantizationMapExtent_ = {},
                                                            const void * pNext_                 = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , quantizationMap{ quantizationMap_ }
      , quantizationMapExtent{ quantizationMapExtent_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapInfoKHR( VideoEncodeQuantizationMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; valid because both types share an identical memory layout.
    VideoEncodeQuantizationMapInfoKHR( VkVideoEncodeQuantizationMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeQuantizationMapInfoKHR( *reinterpret_cast<VideoEncodeQuantizationMapInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeQuantizationMapInfoKHR & operator=( VideoEncodeQuantizationMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (same layout-equivalence argument as the converting constructor).
    VideoEncodeQuantizationMapInfoKHR & operator=( VkVideoEncodeQuantizationMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeQuantizationMapInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeQuantizationMapInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeQuantizationMapInfoKHR & setQuantizationMap( ImageView quantizationMap_ ) VULKAN_HPP_NOEXCEPT
    {
      quantizationMap = quantizationMap_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeQuantizationMapInfoKHR & setQuantizationMapExtent( Extent2D const & quantizationMapExtent_ ) VULKAN_HPP_NOEXCEPT
    {
      quantizationMapExtent = quantizationMapExtent_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, so this wrapper can be passed directly to the C API.
    operator VkVideoEncodeQuantizationMapInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeQuantizationMapInfoKHR *>( this );
    }

    operator VkVideoEncodeQuantizationMapInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeQuantizationMapInfoKHR *>( this );
    }

    operator VkVideoEncodeQuantizationMapInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeQuantizationMapInfoKHR *>( this );
    }

    operator VkVideoEncodeQuantizationMapInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeQuantizationMapInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used by the reflection-based operator== below.
    std::tuple<StructureType const &, const void * const &, ImageView const &, Extent2D const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, quantizationMap, quantizationMapExtent );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeQuantizationMapInfoKHR const & ) const = default;
#else
    // NOTE: pNext is compared as a raw pointer value; chained structs are not compared deeply.
    bool operator==( VideoEncodeQuantizationMapInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( quantizationMap == rhs.quantizationMap ) &&
             ( quantizationMapExtent == rhs.quantizationMapExtent );
#  endif
    }

    bool operator!=( VideoEncodeQuantizationMapInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                 = StructureType::eVideoEncodeQuantizationMapInfoKHR;
    const void *  pNext                 = {};
    ImageView     quantizationMap       = {};
    Extent2D      quantizationMapExtent = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper, for generic code (e.g. StructureChain) that
  // needs to recover the wrapper type from the C type.
  template <>
  struct CppType<VkVideoEncodeQuantizationMapInfoKHR>
  {
    using Type = VideoEncodeQuantizationMapInfoKHR;
  };
#endif

  // Maps the StructureType enumerant back to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeQuantizationMapInfoKHR>
  {
    using Type = VideoEncodeQuantizationMapInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR.html
  struct VideoEncodeQuantizationMapSessionParametersCreateInfoKHR
  {
    // The layout-compatible native C struct; all reinterpret_cast conversions below rely on this equivalence.
    using NativeType = VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR;

    // allowDuplicate == false: at most one instance of this struct may appear in a StructureChain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeQuantizationMapSessionParametersCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapSessionParametersCreateInfoKHR( Extent2D     quantizationMapTexelSize_ = {},
                                                                                   const void * pNext_                    = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , quantizationMapTexelSize{ quantizationMapTexelSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapSessionParametersCreateInfoKHR( VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; valid because both types share an identical memory layout.
    VideoEncodeQuantizationMapSessionParametersCreateInfoKHR( VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeQuantizationMapSessionParametersCreateInfoKHR( *reinterpret_cast<VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeQuantizationMapSessionParametersCreateInfoKHR &
      operator=( VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (same layout-equivalence argument as the converting constructor).
    VideoEncodeQuantizationMapSessionParametersCreateInfoKHR &
      operator=( VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeQuantizationMapSessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeQuantizationMapSessionParametersCreateInfoKHR &
      setQuantizationMapTexelSize( Extent2D const & quantizationMapTexelSize_ ) VULKAN_HPP_NOEXCEPT
    {
      quantizationMapTexelSize = quantizationMapTexelSize_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, so this wrapper can be passed directly to the C API.
    operator VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR *>( this );
    }

    operator VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR *>( this );
    }

    operator VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR *>( this );
    }

    operator VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used by the reflection-based operator== below.
    std::tuple<StructureType const &, const void * const &, Extent2D const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, quantizationMapTexelSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & ) const = default;
#else
    // NOTE: pNext is compared as a raw pointer value; chained structs are not compared deeply.
    bool operator==( VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( quantizationMapTexelSize == rhs.quantizationMapTexelSize );
#  endif
    }

    bool operator!=( VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                    = StructureType::eVideoEncodeQuantizationMapSessionParametersCreateInfoKHR;
    const void *  pNext                    = {};
    Extent2D      quantizationMapTexelSize = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper, for generic code (e.g. StructureChain) that
  // needs to recover the wrapper type from the C type.
  template <>
  struct CppType<VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR>
  {
    using Type = VideoEncodeQuantizationMapSessionParametersCreateInfoKHR;
  };
#endif

  // Maps the StructureType enumerant back to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeQuantizationMapSessionParametersCreateInfoKHR>
  {
    using Type = VideoEncodeQuantizationMapSessionParametersCreateInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeRateControlLayerInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeRateControlLayerInfoKHR.html
  struct VideoEncodeRateControlLayerInfoKHR
  {
    // The layout-compatible native C struct; all reinterpret_cast conversions below rely on this equivalence.
    using NativeType = VkVideoEncodeRateControlLayerInfoKHR;

    // allowDuplicate == false: at most one instance of this struct may appear in a StructureChain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeRateControlLayerInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeRateControlLayerInfoKHR( uint64_t     averageBitrate_       = {},
                                                             uint64_t     maxBitrate_           = {},
                                                             uint32_t     frameRateNumerator_   = {},
                                                             uint32_t     frameRateDenominator_ = {},
                                                             const void * pNext_                = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , averageBitrate{ averageBitrate_ }
      , maxBitrate{ maxBitrate_ }
      , frameRateNumerator{ frameRateNumerator_ }
      , frameRateDenominator{ frameRateDenominator_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeRateControlLayerInfoKHR( VideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; valid because both types share an identical memory layout.
    VideoEncodeRateControlLayerInfoKHR( VkVideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeRateControlLayerInfoKHR( *reinterpret_cast<VideoEncodeRateControlLayerInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeRateControlLayerInfoKHR & operator=( VideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (same layout-equivalence argument as the converting constructor).
    VideoEncodeRateControlLayerInfoKHR & operator=( VkVideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeRateControlLayerInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setAverageBitrate( uint64_t averageBitrate_ ) VULKAN_HPP_NOEXCEPT
    {
      averageBitrate = averageBitrate_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setMaxBitrate( uint64_t maxBitrate_ ) VULKAN_HPP_NOEXCEPT
    {
      maxBitrate = maxBitrate_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setFrameRateNumerator( uint32_t frameRateNumerator_ ) VULKAN_HPP_NOEXCEPT
    {
      frameRateNumerator = frameRateNumerator_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setFrameRateDenominator( uint32_t frameRateDenominator_ ) VULKAN_HPP_NOEXCEPT
    {
      frameRateDenominator = frameRateDenominator_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, so this wrapper can be passed directly to the C API.
    operator VkVideoEncodeRateControlLayerInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeRateControlLayerInfoKHR *>( this );
    }

    operator VkVideoEncodeRateControlLayerInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeRateControlLayerInfoKHR *>( this );
    }

    operator VkVideoEncodeRateControlLayerInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeRateControlLayerInfoKHR *>( this );
    }

    operator VkVideoEncodeRateControlLayerInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeRateControlLayerInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used by the reflection-based operator== below.
    std::tuple<StructureType const &, const void * const &, uint64_t const &, uint64_t const &, uint32_t const &, uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, averageBitrate, maxBitrate, frameRateNumerator, frameRateDenominator );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeRateControlLayerInfoKHR const & ) const = default;
#else
    // NOTE: pNext is compared as a raw pointer value; chained structs are not compared deeply.
    bool operator==( VideoEncodeRateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( averageBitrate == rhs.averageBitrate ) && ( maxBitrate == rhs.maxBitrate ) &&
             ( frameRateNumerator == rhs.frameRateNumerator ) && ( frameRateDenominator == rhs.frameRateDenominator );
#  endif
    }

    bool operator!=( VideoEncodeRateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                = StructureType::eVideoEncodeRateControlLayerInfoKHR;
    const void *  pNext                = {};
    uint64_t      averageBitrate       = {};
    uint64_t      maxBitrate           = {};
    uint32_t      frameRateNumerator   = {};
    uint32_t      frameRateDenominator = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct type to its C++ wrapper, for generic code (e.g. StructureChain) that
  // needs to recover the wrapper type from the C type.
  template <>
  struct CppType<VkVideoEncodeRateControlLayerInfoKHR>
  {
    using Type = VideoEncodeRateControlLayerInfoKHR;
  };
#endif

  // Maps the StructureType enumerant back to the corresponding C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeRateControlLayerInfoKHR>
  {
    using Type = VideoEncodeRateControlLayerInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeRateControlInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeRateControlInfoKHR.html
  struct VideoEncodeRateControlInfoKHR
  {
    // The layout-compatible native C struct; all reinterpret_cast conversions below rely on this equivalence.
    using NativeType = VkVideoEncodeRateControlInfoKHR;

    // allowDuplicate == false: at most one instance of this struct may appear in a StructureChain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeRateControlInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      VideoEncodeRateControlInfoKHR( VideoEncodeRateControlFlagsKHR             flags_                        = {},
                                     VideoEncodeRateControlModeFlagBitsKHR      rateControlMode_              = VideoEncodeRateControlModeFlagBitsKHR::eDefault,
                                     uint32_t                                   layerCount_                   = {},
                                     const VideoEncodeRateControlLayerInfoKHR * pLayers_                      = {},
                                     uint32_t                                   virtualBufferSizeInMs_        = {},
                                     uint32_t                                   initialVirtualBufferSizeInMs_ = {},
                                     const void *                               pNext_                        = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , rateControlMode{ rateControlMode_ }
      , layerCount{ layerCount_ }
      , pLayers{ pLayers_ }
      , virtualBufferSizeInMs{ virtualBufferSizeInMs_ }
      , initialVirtualBufferSizeInMs{ initialVirtualBufferSizeInMs_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeRateControlInfoKHR( VideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the equivalent C struct; valid because both types share an identical memory layout.
    VideoEncodeRateControlInfoKHR( VkVideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeRateControlInfoKHR( *reinterpret_cast<VideoEncodeRateControlInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode convenience constructor: derives layerCount and pLayers from the array proxy.
    // CAUTION: only the pointer is stored; the layers array must outlive this struct.
    VideoEncodeRateControlInfoKHR( VideoEncodeRateControlFlagsKHR                                            flags_,
                                   VideoEncodeRateControlModeFlagBitsKHR                                     rateControlMode_,
                                   ArrayProxyNoTemporaries<const VideoEncodeRateControlLayerInfoKHR> const & layers_,
                                   uint32_t                                                                  virtualBufferSizeInMs_        = {},
                                   uint32_t                                                                  initialVirtualBufferSizeInMs_ = {},
                                   const void *                                                              pNext_                        = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , rateControlMode( rateControlMode_ )
      , layerCount( static_cast<uint32_t>( layers_.size() ) )
      , pLayers( layers_.data() )
      , virtualBufferSizeInMs( virtualBufferSizeInMs_ )
      , initialVirtualBufferSizeInMs( initialVirtualBufferSizeInMs_ )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VideoEncodeRateControlInfoKHR & operator=( VideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the equivalent C struct (same layout-equivalence argument as the converting constructor).
    VideoEncodeRateControlInfoKHR & operator=( VkVideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeRateControlInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setFlags( VideoEncodeRateControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setRateControlMode( VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ ) VULKAN_HPP_NOEXCEPT
    {
      rateControlMode = rateControlMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
    {
      layerCount = layerCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setPLayers( const VideoEncodeRateControlLayerInfoKHR * pLayers_ ) VULKAN_HPP_NOEXCEPT
    {
      pLayers = pLayers_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets layerCount and pLayers together from an array proxy; the array must outlive this struct.
    VideoEncodeRateControlInfoKHR & setLayers( ArrayProxyNoTemporaries<const VideoEncodeRateControlLayerInfoKHR> const & layers_ ) VULKAN_HPP_NOEXCEPT
    {
      layerCount = static_cast<uint32_t>( layers_.size() );
      pLayers    = layers_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setVirtualBufferSizeInMs( uint32_t virtualBufferSizeInMs_ ) VULKAN_HPP_NOEXCEPT
    {
      virtualBufferSizeInMs = virtualBufferSizeInMs_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setInitialVirtualBufferSizeInMs( uint32_t initialVirtualBufferSizeInMs_ ) VULKAN_HPP_NOEXCEPT
    {
      initialVirtualBufferSizeInMs = initialVirtualBufferSizeInMs_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, so this wrapper can be passed directly to the C API.
    operator VkVideoEncodeRateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeRateControlInfoKHR *>( this );
    }

    operator VkVideoEncodeRateControlInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeRateControlInfoKHR *>( this );
    }

    operator VkVideoEncodeRateControlInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeRateControlInfoKHR *>( this );
    }

    operator VkVideoEncodeRateControlInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeRateControlInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used by the reflection-based operator== below.
    std::tuple<StructureType const &,
               const void * const &,
               VideoEncodeRateControlFlagsKHR const &,
               VideoEncodeRateControlModeFlagBitsKHR const &,
               uint32_t const &,
               const VideoEncodeRateControlLayerInfoKHR * const &,
               uint32_t const &,
               uint32_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, rateControlMode, layerCount, pLayers, virtualBufferSizeInMs, initialVirtualBufferSizeInMs );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeRateControlInfoKHR const & ) const = default;
#else
    // NOTE: pNext and pLayers are compared as raw pointer values; pointed-to data is not compared deeply.
    bool operator==( VideoEncodeRateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( rateControlMode == rhs.rateControlMode ) &&
             ( layerCount == rhs.layerCount ) && ( pLayers == rhs.pLayers ) && ( virtualBufferSizeInMs == rhs.virtualBufferSizeInMs ) &&
             ( initialVirtualBufferSizeInMs == rhs.initialVirtualBufferSizeInMs );
#  endif
    }

    bool operator!=( VideoEncodeRateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                              sType                        = StructureType::eVideoEncodeRateControlInfoKHR;
    const void *                               pNext                        = {};
    VideoEncodeRateControlFlagsKHR             flags                        = {};
    VideoEncodeRateControlModeFlagBitsKHR      rateControlMode              = VideoEncodeRateControlModeFlagBitsKHR::eDefault;
    uint32_t                                   layerCount                   = {};
    const VideoEncodeRateControlLayerInfoKHR * pLayers                      = {};
    uint32_t                                   virtualBufferSizeInMs        = {};
    uint32_t                                   initialVirtualBufferSizeInMs = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API type to its C++ wrapper for generic code (C++20 and newer only).
  template <>
  struct CppType<VkVideoEncodeRateControlInfoKHR>
  {
    using Type = VideoEncodeRateControlInfoKHR;
  };
#endif

  // Maps the StructureType enumerant to the corresponding C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeRateControlInfoKHR>
  {
    using Type = VideoEncodeRateControlInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeRgbConversionCapabilitiesVALVE, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeRgbConversionCapabilitiesVALVE.html
  struct VideoEncodeRgbConversionCapabilitiesVALVE
  {
    using NativeType = VkVideoEncodeRgbConversionCapabilitiesVALVE;

    // Compile-time traits consumed by vulkan.hpp's structure-chain utilities.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeRgbConversionCapabilitiesVALVE;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; every argument has a default, with pNext last.
    VULKAN_HPP_CONSTEXPR VideoEncodeRgbConversionCapabilitiesVALVE( VideoEncodeRgbModelConversionFlagsVALVE  rgbModels_      = {},
                                                                    VideoEncodeRgbRangeCompressionFlagsVALVE rgbRanges_      = {},
                                                                    VideoEncodeRgbChromaOffsetFlagsVALVE     xChromaOffsets_ = {},
                                                                    VideoEncodeRgbChromaOffsetFlagsVALVE     yChromaOffsets_ = {},
                                                                    void *                                   pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , rgbModels{ rgbModels_ }
      , rgbRanges{ rgbRanges_ }
      , xChromaOffsets{ xChromaOffsets_ }
      , yChromaOffsets{ yChromaOffsets_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeRgbConversionCapabilitiesVALVE( VideoEncodeRgbConversionCapabilitiesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on identical layout (see NativeType).
    VideoEncodeRgbConversionCapabilitiesVALVE( VkVideoEncodeRgbConversionCapabilitiesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeRgbConversionCapabilitiesVALVE( *reinterpret_cast<VideoEncodeRgbConversionCapabilitiesVALVE const *>( &rhs ) )
    {
    }

    VideoEncodeRgbConversionCapabilitiesVALVE & operator=( VideoEncodeRgbConversionCapabilitiesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the same layout-punning cast.
    VideoEncodeRgbConversionCapabilitiesVALVE & operator=( VkVideoEncodeRgbConversionCapabilitiesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeRgbConversionCapabilitiesVALVE const *>( &rhs );
      return *this;
    }

    // NOTE: no member setters are generated for this struct (and pNext is a
    // non-const void *), consistent with a capabilities structure that is filled
    // in by the implementation rather than by the application.
    operator VkVideoEncodeRgbConversionCapabilitiesVALVE const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeRgbConversionCapabilitiesVALVE *>( this );
    }

    operator VkVideoEncodeRgbConversionCapabilitiesVALVE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeRgbConversionCapabilitiesVALVE *>( this );
    }

    operator VkVideoEncodeRgbConversionCapabilitiesVALVE const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeRgbConversionCapabilitiesVALVE *>( this );
    }

    operator VkVideoEncodeRgbConversionCapabilitiesVALVE *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeRgbConversionCapabilitiesVALVE *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: tuple of references to all members, in declaration order.
    std::tuple<StructureType const &,
               void * const &,
               VideoEncodeRgbModelConversionFlagsVALVE const &,
               VideoEncodeRgbRangeCompressionFlagsVALVE const &,
               VideoEncodeRgbChromaOffsetFlagsVALVE const &,
               VideoEncodeRgbChromaOffsetFlagsVALVE const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, rgbModels, rgbRanges, xChromaOffsets, yChromaOffsets );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeRgbConversionCapabilitiesVALVE const & ) const = default;
#else
    // Member-wise equality; pNext is compared by pointer value (shallow).
    bool operator==( VideoEncodeRgbConversionCapabilitiesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rgbModels == rhs.rgbModels ) && ( rgbRanges == rhs.rgbRanges ) &&
             ( xChromaOffsets == rhs.xChromaOffsets ) && ( yChromaOffsets == rhs.yChromaOffsets );
#  endif
    }

    bool operator!=( VideoEncodeRgbConversionCapabilitiesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    // Data members match the C struct's fields in order; do not reorder.
  public:
    StructureType                            sType          = StructureType::eVideoEncodeRgbConversionCapabilitiesVALVE;
    void *                                   pNext          = {};
    VideoEncodeRgbModelConversionFlagsVALVE  rgbModels      = {};
    VideoEncodeRgbRangeCompressionFlagsVALVE rgbRanges      = {};
    VideoEncodeRgbChromaOffsetFlagsVALVE     xChromaOffsets = {};
    VideoEncodeRgbChromaOffsetFlagsVALVE     yChromaOffsets = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API type to its C++ wrapper for generic code (C++20 and newer only).
  template <>
  struct CppType<VkVideoEncodeRgbConversionCapabilitiesVALVE>
  {
    using Type = VideoEncodeRgbConversionCapabilitiesVALVE;
  };
#endif

  // Maps the StructureType enumerant to the corresponding C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeRgbConversionCapabilitiesVALVE>
  {
    using Type = VideoEncodeRgbConversionCapabilitiesVALVE;
  };

  // wrapper struct for struct VkVideoEncodeSessionIntraRefreshCreateInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeSessionIntraRefreshCreateInfoKHR.html
  struct VideoEncodeSessionIntraRefreshCreateInfoKHR
  {
    using NativeType = VkVideoEncodeSessionIntraRefreshCreateInfoKHR;

    // Compile-time traits consumed by vulkan.hpp's structure-chain utilities.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeSessionIntraRefreshCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; every argument has a default, with pNext last.
    VULKAN_HPP_CONSTEXPR
      VideoEncodeSessionIntraRefreshCreateInfoKHR( VideoEncodeIntraRefreshModeFlagBitsKHR intraRefreshMode_ = VideoEncodeIntraRefreshModeFlagBitsKHR::eNone,
                                                   const void *                           pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , intraRefreshMode{ intraRefreshMode_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeSessionIntraRefreshCreateInfoKHR( VideoEncodeSessionIntraRefreshCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on identical layout (see NativeType).
    VideoEncodeSessionIntraRefreshCreateInfoKHR( VkVideoEncodeSessionIntraRefreshCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeSessionIntraRefreshCreateInfoKHR( *reinterpret_cast<VideoEncodeSessionIntraRefreshCreateInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeSessionIntraRefreshCreateInfoKHR & operator=( VideoEncodeSessionIntraRefreshCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the same layout-punning cast.
    VideoEncodeSessionIntraRefreshCreateInfoKHR & operator=( VkVideoEncodeSessionIntraRefreshCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeSessionIntraRefreshCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters (chainable; each returns *this).
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeSessionIntraRefreshCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeSessionIntraRefreshCreateInfoKHR &
      setIntraRefreshMode( VideoEncodeIntraRefreshModeFlagBitsKHR intraRefreshMode_ ) VULKAN_HPP_NOEXCEPT
    {
      intraRefreshMode = intraRefreshMode_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the underlying C type for passing to the C API.
    operator VkVideoEncodeSessionIntraRefreshCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeSessionIntraRefreshCreateInfoKHR *>( this );
    }

    operator VkVideoEncodeSessionIntraRefreshCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeSessionIntraRefreshCreateInfoKHR *>( this );
    }

    operator VkVideoEncodeSessionIntraRefreshCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeSessionIntraRefreshCreateInfoKHR *>( this );
    }

    operator VkVideoEncodeSessionIntraRefreshCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeSessionIntraRefreshCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: tuple of references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, VideoEncodeIntraRefreshModeFlagBitsKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, intraRefreshMode );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeSessionIntraRefreshCreateInfoKHR const & ) const = default;
#else
    // Member-wise equality; pNext is compared by pointer value (shallow).
    bool operator==( VideoEncodeSessionIntraRefreshCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( intraRefreshMode == rhs.intraRefreshMode );
#  endif
    }

    bool operator!=( VideoEncodeSessionIntraRefreshCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    // Data members match the C struct's fields in order; do not reorder.
  public:
    StructureType                          sType            = StructureType::eVideoEncodeSessionIntraRefreshCreateInfoKHR;
    const void *                           pNext            = {};
    VideoEncodeIntraRefreshModeFlagBitsKHR intraRefreshMode = VideoEncodeIntraRefreshModeFlagBitsKHR::eNone;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API type to its C++ wrapper for generic code (C++20 and newer only).
  template <>
  struct CppType<VkVideoEncodeSessionIntraRefreshCreateInfoKHR>
  {
    using Type = VideoEncodeSessionIntraRefreshCreateInfoKHR;
  };
#endif

  // Maps the StructureType enumerant to the corresponding C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeSessionIntraRefreshCreateInfoKHR>
  {
    using Type = VideoEncodeSessionIntraRefreshCreateInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeSessionParametersFeedbackInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeSessionParametersFeedbackInfoKHR.html
  struct VideoEncodeSessionParametersFeedbackInfoKHR
  {
    using NativeType = VkVideoEncodeSessionParametersFeedbackInfoKHR;

    // Compile-time traits consumed by vulkan.hpp's structure-chain utilities.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeSessionParametersFeedbackInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; every argument has a default, with pNext last.
    VULKAN_HPP_CONSTEXPR VideoEncodeSessionParametersFeedbackInfoKHR( Bool32 hasOverrides_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , hasOverrides{ hasOverrides_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeSessionParametersFeedbackInfoKHR( VideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on identical layout (see NativeType).
    VideoEncodeSessionParametersFeedbackInfoKHR( VkVideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeSessionParametersFeedbackInfoKHR( *reinterpret_cast<VideoEncodeSessionParametersFeedbackInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeSessionParametersFeedbackInfoKHR & operator=( VideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the same layout-punning cast.
    VideoEncodeSessionParametersFeedbackInfoKHR & operator=( VkVideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeSessionParametersFeedbackInfoKHR const *>( &rhs );
      return *this;
    }

    // NOTE: no member setters are generated for this struct (and pNext is a
    // non-const void *), consistent with a feedback structure that is written by
    // the implementation rather than by the application.
    operator VkVideoEncodeSessionParametersFeedbackInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeSessionParametersFeedbackInfoKHR *>( this );
    }

    operator VkVideoEncodeSessionParametersFeedbackInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( this );
    }

    operator VkVideoEncodeSessionParametersFeedbackInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeSessionParametersFeedbackInfoKHR *>( this );
    }

    operator VkVideoEncodeSessionParametersFeedbackInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: tuple of references to all members, in declaration order.
    std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, hasOverrides );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeSessionParametersFeedbackInfoKHR const & ) const = default;
#else
    // Member-wise equality; pNext is compared by pointer value (shallow).
    bool operator==( VideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hasOverrides == rhs.hasOverrides );
#  endif
    }

    bool operator!=( VideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    // Data members match the C struct's fields in order; do not reorder.
  public:
    StructureType sType        = StructureType::eVideoEncodeSessionParametersFeedbackInfoKHR;
    void *        pNext        = {};
    Bool32        hasOverrides = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API type to its C++ wrapper for generic code (C++20 and newer only).
  template <>
  struct CppType<VkVideoEncodeSessionParametersFeedbackInfoKHR>
  {
    using Type = VideoEncodeSessionParametersFeedbackInfoKHR;
  };
#endif

  // Maps the StructureType enumerant to the corresponding C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeSessionParametersFeedbackInfoKHR>
  {
    using Type = VideoEncodeSessionParametersFeedbackInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeSessionParametersGetInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeSessionParametersGetInfoKHR.html
  struct VideoEncodeSessionParametersGetInfoKHR
  {
    using NativeType = VkVideoEncodeSessionParametersGetInfoKHR;

    // Compile-time traits consumed by vulkan.hpp's structure-chain utilities.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeSessionParametersGetInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; every argument has a default, with pNext last.
    VULKAN_HPP_CONSTEXPR VideoEncodeSessionParametersGetInfoKHR( VideoSessionParametersKHR videoSessionParameters_ = {},
                                                                 const void *              pNext_                  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , videoSessionParameters{ videoSessionParameters_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeSessionParametersGetInfoKHR( VideoEncodeSessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on identical layout (see NativeType).
    VideoEncodeSessionParametersGetInfoKHR( VkVideoEncodeSessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeSessionParametersGetInfoKHR( *reinterpret_cast<VideoEncodeSessionParametersGetInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeSessionParametersGetInfoKHR & operator=( VideoEncodeSessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the same layout-punning cast.
    VideoEncodeSessionParametersGetInfoKHR & operator=( VkVideoEncodeSessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeSessionParametersGetInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters (chainable; each returns *this).
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeSessionParametersGetInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeSessionParametersGetInfoKHR &
      setVideoSessionParameters( VideoSessionParametersKHR videoSessionParameters_ ) VULKAN_HPP_NOEXCEPT
    {
      videoSessionParameters = videoSessionParameters_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the underlying C type for passing to the C API.
    operator VkVideoEncodeSessionParametersGetInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( this );
    }

    operator VkVideoEncodeSessionParametersGetInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeSessionParametersGetInfoKHR *>( this );
    }

    operator VkVideoEncodeSessionParametersGetInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( this );
    }

    operator VkVideoEncodeSessionParametersGetInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeSessionParametersGetInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: tuple of references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, VideoSessionParametersKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, videoSessionParameters );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeSessionParametersGetInfoKHR const & ) const = default;
#else
    // Member-wise equality; pNext is compared by pointer value (shallow).
    bool operator==( VideoEncodeSessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoSessionParameters == rhs.videoSessionParameters );
#  endif
    }

    bool operator!=( VideoEncodeSessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    // Data members match the C struct's fields in order; do not reorder.
  public:
    StructureType             sType                  = StructureType::eVideoEncodeSessionParametersGetInfoKHR;
    const void *              pNext                  = {};
    VideoSessionParametersKHR videoSessionParameters = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API type to its C++ wrapper for generic code (C++20 and newer only).
  template <>
  struct CppType<VkVideoEncodeSessionParametersGetInfoKHR>
  {
    using Type = VideoEncodeSessionParametersGetInfoKHR;
  };
#endif

  // Maps the StructureType enumerant to the corresponding C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeSessionParametersGetInfoKHR>
  {
    using Type = VideoEncodeSessionParametersGetInfoKHR;
  };

  // wrapper struct for struct VkVideoEncodeSessionRgbConversionCreateInfoVALVE, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeSessionRgbConversionCreateInfoVALVE.html
  struct VideoEncodeSessionRgbConversionCreateInfoVALVE
  {
    using NativeType = VkVideoEncodeSessionRgbConversionCreateInfoVALVE;

    // Compile-time traits consumed by vulkan.hpp's structure-chain utilities.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeSessionRgbConversionCreateInfoVALVE;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; every argument has a default, with pNext last.
    VULKAN_HPP_CONSTEXPR VideoEncodeSessionRgbConversionCreateInfoVALVE(
      VideoEncodeRgbModelConversionFlagBitsVALVE  rgbModel_      = VideoEncodeRgbModelConversionFlagBitsVALVE::eRgbIdentity,
      VideoEncodeRgbRangeCompressionFlagBitsVALVE rgbRange_      = VideoEncodeRgbRangeCompressionFlagBitsVALVE::eFullRange,
      VideoEncodeRgbChromaOffsetFlagBitsVALVE     xChromaOffset_ = VideoEncodeRgbChromaOffsetFlagBitsVALVE::eCositedEven,
      VideoEncodeRgbChromaOffsetFlagBitsVALVE     yChromaOffset_ = VideoEncodeRgbChromaOffsetFlagBitsVALVE::eCositedEven,
      const void *                                pNext_         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , rgbModel{ rgbModel_ }
      , rgbRange{ rgbRange_ }
      , xChromaOffset{ xChromaOffset_ }
      , yChromaOffset{ yChromaOffset_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      VideoEncodeSessionRgbConversionCreateInfoVALVE( VideoEncodeSessionRgbConversionCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on identical layout (see NativeType).
    VideoEncodeSessionRgbConversionCreateInfoVALVE( VkVideoEncodeSessionRgbConversionCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeSessionRgbConversionCreateInfoVALVE( *reinterpret_cast<VideoEncodeSessionRgbConversionCreateInfoVALVE const *>( &rhs ) )
    {
    }

    VideoEncodeSessionRgbConversionCreateInfoVALVE & operator=( VideoEncodeSessionRgbConversionCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the same layout-punning cast.
    VideoEncodeSessionRgbConversionCreateInfoVALVE & operator=( VkVideoEncodeSessionRgbConversionCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeSessionRgbConversionCreateInfoVALVE const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters (chainable; each returns *this).
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeSessionRgbConversionCreateInfoVALVE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeSessionRgbConversionCreateInfoVALVE &
      setRgbModel( VideoEncodeRgbModelConversionFlagBitsVALVE rgbModel_ ) VULKAN_HPP_NOEXCEPT
    {
      rgbModel = rgbModel_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeSessionRgbConversionCreateInfoVALVE &
      setRgbRange( VideoEncodeRgbRangeCompressionFlagBitsVALVE rgbRange_ ) VULKAN_HPP_NOEXCEPT
    {
      rgbRange = rgbRange_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeSessionRgbConversionCreateInfoVALVE &
      setXChromaOffset( VideoEncodeRgbChromaOffsetFlagBitsVALVE xChromaOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      xChromaOffset = xChromaOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeSessionRgbConversionCreateInfoVALVE &
      setYChromaOffset( VideoEncodeRgbChromaOffsetFlagBitsVALVE yChromaOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      yChromaOffset = yChromaOffset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the underlying C type for passing to the C API.
    operator VkVideoEncodeSessionRgbConversionCreateInfoVALVE const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeSessionRgbConversionCreateInfoVALVE *>( this );
    }

    operator VkVideoEncodeSessionRgbConversionCreateInfoVALVE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeSessionRgbConversionCreateInfoVALVE *>( this );
    }

    operator VkVideoEncodeSessionRgbConversionCreateInfoVALVE const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeSessionRgbConversionCreateInfoVALVE *>( this );
    }

    operator VkVideoEncodeSessionRgbConversionCreateInfoVALVE *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeSessionRgbConversionCreateInfoVALVE *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: tuple of references to all members, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               VideoEncodeRgbModelConversionFlagBitsVALVE const &,
               VideoEncodeRgbRangeCompressionFlagBitsVALVE const &,
               VideoEncodeRgbChromaOffsetFlagBitsVALVE const &,
               VideoEncodeRgbChromaOffsetFlagBitsVALVE const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, rgbModel, rgbRange, xChromaOffset, yChromaOffset );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeSessionRgbConversionCreateInfoVALVE const & ) const = default;
#else
    // Member-wise equality; pNext is compared by pointer value (shallow).
    bool operator==( VideoEncodeSessionRgbConversionCreateInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rgbModel == rhs.rgbModel ) && ( rgbRange == rhs.rgbRange ) &&
             ( xChromaOffset == rhs.xChromaOffset ) && ( yChromaOffset == rhs.yChromaOffset );
#  endif
    }

    bool operator!=( VideoEncodeSessionRgbConversionCreateInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    // Data members match the C struct's fields in order; do not reorder.
  public:
    StructureType                               sType         = StructureType::eVideoEncodeSessionRgbConversionCreateInfoVALVE;
    const void *                                pNext         = {};
    VideoEncodeRgbModelConversionFlagBitsVALVE  rgbModel      = VideoEncodeRgbModelConversionFlagBitsVALVE::eRgbIdentity;
    VideoEncodeRgbRangeCompressionFlagBitsVALVE rgbRange      = VideoEncodeRgbRangeCompressionFlagBitsVALVE::eFullRange;
    VideoEncodeRgbChromaOffsetFlagBitsVALVE     xChromaOffset = VideoEncodeRgbChromaOffsetFlagBitsVALVE::eCositedEven;
    VideoEncodeRgbChromaOffsetFlagBitsVALVE     yChromaOffset = VideoEncodeRgbChromaOffsetFlagBitsVALVE::eCositedEven;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the C API type to its C++ wrapper for generic code (C++20 and newer only).
  template <>
  struct CppType<VkVideoEncodeSessionRgbConversionCreateInfoVALVE>
  {
    using Type = VideoEncodeSessionRgbConversionCreateInfoVALVE;
  };
#endif

  // Maps the StructureType enumerant to the corresponding C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeSessionRgbConversionCreateInfoVALVE>
  {
    using Type = VideoEncodeSessionRgbConversionCreateInfoVALVE;
  };

  // wrapper struct for struct VkVideoEncodeUsageInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeUsageInfoKHR.html
  struct VideoEncodeUsageInfoKHR
  {
    using NativeType = VkVideoEncodeUsageInfoKHR;

    // Compile-time traits consumed by vulkan.hpp's structure-chain utilities.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEncodeUsageInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; every argument has a default, with pNext last.
    VULKAN_HPP_CONSTEXPR VideoEncodeUsageInfoKHR( VideoEncodeUsageFlagsKHR   videoUsageHints_   = {},
                                                  VideoEncodeContentFlagsKHR videoContentHints_ = {},
                                                  VideoEncodeTuningModeKHR   tuningMode_        = VideoEncodeTuningModeKHR::eDefault,
                                                  const void *               pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , videoUsageHints{ videoUsageHints_ }
      , videoContentHints{ videoContentHints_ }
      , tuningMode{ tuningMode_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeUsageInfoKHR( VideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the C struct; relies on identical layout (see NativeType).
    VideoEncodeUsageInfoKHR( VkVideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeUsageInfoKHR( *reinterpret_cast<VideoEncodeUsageInfoKHR const *>( &rhs ) )
    {
    }

    VideoEncodeUsageInfoKHR & operator=( VideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the C struct via the same layout-punning cast.
    VideoEncodeUsageInfoKHR & operator=( VkVideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEncodeUsageInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters (chainable; each returns *this).
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR & setVideoUsageHints( VideoEncodeUsageFlagsKHR videoUsageHints_ ) VULKAN_HPP_NOEXCEPT
    {
      videoUsageHints = videoUsageHints_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR & setVideoContentHints( VideoEncodeContentFlagsKHR videoContentHints_ ) VULKAN_HPP_NOEXCEPT
    {
      videoContentHints = videoContentHints_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR & setTuningMode( VideoEncodeTuningModeKHR tuningMode_ ) VULKAN_HPP_NOEXCEPT
    {
      tuningMode = tuningMode_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the underlying C type for passing to the C API.
    operator VkVideoEncodeUsageInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeUsageInfoKHR *>( this );
    }

    operator VkVideoEncodeUsageInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeUsageInfoKHR *>( this );
    }

    operator VkVideoEncodeUsageInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEncodeUsageInfoKHR *>( this );
    }

    operator VkVideoEncodeUsageInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEncodeUsageInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Reflection helper: tuple of references to all members, in declaration order.
    std::
      tuple<StructureType const &, const void * const &, VideoEncodeUsageFlagsKHR const &, VideoEncodeContentFlagsKHR const &, VideoEncodeTuningModeKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, videoUsageHints, videoContentHints, tuningMode );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeUsageInfoKHR const & ) const = default;
#else
    // Member-wise equality; pNext is compared by pointer value (shallow).
    bool operator==( VideoEncodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoUsageHints == rhs.videoUsageHints ) && ( videoContentHints == rhs.videoContentHints ) &&
             ( tuningMode == rhs.tuningMode );
#  endif
    }

    bool operator!=( VideoEncodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    // Data members match the C struct's fields in order; do not reorder.
  public:
    StructureType              sType             = StructureType::eVideoEncodeUsageInfoKHR;
    const void *               pNext             = {};
    VideoEncodeUsageFlagsKHR   videoUsageHints   = {};
    VideoEncodeContentFlagsKHR videoContentHints = {};
    VideoEncodeTuningModeKHR   tuningMode        = VideoEncodeTuningModeKHR::eDefault;
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type VkVideoEncodeUsageInfoKHR back to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkVideoEncodeUsageInfoKHR>
  {
    using Type = VideoEncodeUsageInfoKHR;
  };
#endif

  // Maps the StructureType enumerant eVideoEncodeUsageInfoKHR to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeUsageInfoKHR>
  {
    using Type = VideoEncodeUsageInfoKHR;
  };

  // wrapper struct for struct VkVideoEndCodingInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEndCodingInfoKHR.html
  struct VideoEndCodingInfoKHR
  {
    using NativeType = VkVideoEndCodingInfoKHR;

    // allowDuplicate: whether this struct may legally appear more than once in a pNext chain.
    // structureType: compile-time sType value identifying this struct.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoEndCodingInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer below and is intentionally not a parameter.
    VULKAN_HPP_CONSTEXPR VideoEndCodingInfoKHR( VideoEndCodingFlagsKHR flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEndCodingInfoKHR( VideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper mirrors the C struct's memory layout.
    VideoEndCodingInfoKHR( VkVideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEndCodingInfoKHR( *reinterpret_cast<VideoEndCodingInfoKHR const *>( &rhs ) )
    {
    }

    VideoEndCodingInfoKHR & operator=( VideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    VideoEndCodingInfoKHR & operator=( VkVideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoEndCodingInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls compose fluently.
    VULKAN_HPP_CONSTEXPR_14 VideoEndCodingInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEndCodingInfoKHR & setFlags( VideoEndCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer (const and non-const).
    operator VkVideoEndCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEndCodingInfoKHR *>( this );
    }

    operator VkVideoEndCodingInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEndCodingInfoKHR *>( this );
    }

    operator VkVideoEndCodingInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoEndCodingInfoKHR *>( this );
    }

    operator VkVideoEndCodingInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoEndCodingInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of const references; used by the reflection-based operator== below.
    std::tuple<StructureType const &, const void * const &, VideoEndCodingFlagsKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEndCodingInfoKHR const & ) const = default;
#else
    // Member-wise equality fallback when defaulted <=> is not available.
    // Note: pNext is compared by pointer value, not by pointee contents.
    bool operator==( VideoEndCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
#  endif
    }

    bool operator!=( VideoEndCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType          sType = StructureType::eVideoEndCodingInfoKHR;
    const void *           pNext = {};
    VideoEndCodingFlagsKHR flags = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to this C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkVideoEndCodingInfoKHR>
  {
    using Type = VideoEndCodingInfoKHR;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoEndCodingInfoKHR>
  {
    using Type = VideoEndCodingInfoKHR;
  };

  // wrapper struct for struct VkVideoFormatAV1QuantizationMapPropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoFormatAV1QuantizationMapPropertiesKHR.html
  struct VideoFormatAV1QuantizationMapPropertiesKHR
  {
    using NativeType = VkVideoFormatAV1QuantizationMapPropertiesKHR;

    // allowDuplicate: whether this struct may legally appear more than once in a pNext chain.
    // structureType: compile-time sType value identifying this struct.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoFormatAv1QuantizationMapPropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer below.
    // NOTE: no setters are generated for this struct — presumably an output-only
    // ("returnedonly") properties struct filled in by the implementation.
    VULKAN_HPP_CONSTEXPR VideoFormatAV1QuantizationMapPropertiesKHR( VideoEncodeAV1SuperblockSizeFlagsKHR compatibleSuperblockSizes_ = {},
                                                                     void *                               pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , compatibleSuperblockSizes{ compatibleSuperblockSizes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoFormatAV1QuantizationMapPropertiesKHR( VideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper mirrors the C struct's memory layout.
    VideoFormatAV1QuantizationMapPropertiesKHR( VkVideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoFormatAV1QuantizationMapPropertiesKHR( *reinterpret_cast<VideoFormatAV1QuantizationMapPropertiesKHR const *>( &rhs ) )
    {
    }

    VideoFormatAV1QuantizationMapPropertiesKHR & operator=( VideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    VideoFormatAV1QuantizationMapPropertiesKHR & operator=( VkVideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoFormatAV1QuantizationMapPropertiesKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C struct, by reference and by pointer (const and non-const).
    operator VkVideoFormatAV1QuantizationMapPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoFormatAV1QuantizationMapPropertiesKHR *>( this );
    }

    operator VkVideoFormatAV1QuantizationMapPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoFormatAV1QuantizationMapPropertiesKHR *>( this );
    }

    operator VkVideoFormatAV1QuantizationMapPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoFormatAV1QuantizationMapPropertiesKHR *>( this );
    }

    operator VkVideoFormatAV1QuantizationMapPropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoFormatAV1QuantizationMapPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of const references; used by the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, VideoEncodeAV1SuperblockSizeFlagsKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, compatibleSuperblockSizes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoFormatAV1QuantizationMapPropertiesKHR const & ) const = default;
#else
    // Member-wise equality fallback when defaulted <=> is not available.
    // Note: pNext is compared by pointer value, not by pointee contents.
    bool operator==( VideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compatibleSuperblockSizes == rhs.compatibleSuperblockSizes );
#  endif
    }

    bool operator!=( VideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                        sType                     = StructureType::eVideoFormatAv1QuantizationMapPropertiesKHR;
    void *                               pNext                     = {};
    VideoEncodeAV1SuperblockSizeFlagsKHR compatibleSuperblockSizes = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to this C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkVideoFormatAV1QuantizationMapPropertiesKHR>
  {
    using Type = VideoFormatAV1QuantizationMapPropertiesKHR;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoFormatAv1QuantizationMapPropertiesKHR>
  {
    using Type = VideoFormatAV1QuantizationMapPropertiesKHR;
  };

  // wrapper struct for struct VkVideoFormatH265QuantizationMapPropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoFormatH265QuantizationMapPropertiesKHR.html
  struct VideoFormatH265QuantizationMapPropertiesKHR
  {
    using NativeType = VkVideoFormatH265QuantizationMapPropertiesKHR;

    // allowDuplicate: whether this struct may legally appear more than once in a pNext chain.
    // structureType: compile-time sType value identifying this struct.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoFormatH265QuantizationMapPropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer below.
    // NOTE: no setters are generated for this struct — presumably an output-only
    // ("returnedonly") properties struct filled in by the implementation.
    VULKAN_HPP_CONSTEXPR VideoFormatH265QuantizationMapPropertiesKHR( VideoEncodeH265CtbSizeFlagsKHR compatibleCtbSizes_ = {},
                                                                      void *                         pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , compatibleCtbSizes{ compatibleCtbSizes_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoFormatH265QuantizationMapPropertiesKHR( VideoFormatH265QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper mirrors the C struct's memory layout.
    VideoFormatH265QuantizationMapPropertiesKHR( VkVideoFormatH265QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoFormatH265QuantizationMapPropertiesKHR( *reinterpret_cast<VideoFormatH265QuantizationMapPropertiesKHR const *>( &rhs ) )
    {
    }

    VideoFormatH265QuantizationMapPropertiesKHR & operator=( VideoFormatH265QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    VideoFormatH265QuantizationMapPropertiesKHR & operator=( VkVideoFormatH265QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoFormatH265QuantizationMapPropertiesKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C struct, by reference and by pointer (const and non-const).
    operator VkVideoFormatH265QuantizationMapPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoFormatH265QuantizationMapPropertiesKHR *>( this );
    }

    operator VkVideoFormatH265QuantizationMapPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoFormatH265QuantizationMapPropertiesKHR *>( this );
    }

    operator VkVideoFormatH265QuantizationMapPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoFormatH265QuantizationMapPropertiesKHR *>( this );
    }

    operator VkVideoFormatH265QuantizationMapPropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoFormatH265QuantizationMapPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of const references; used by the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, VideoEncodeH265CtbSizeFlagsKHR const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, compatibleCtbSizes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoFormatH265QuantizationMapPropertiesKHR const & ) const = default;
#else
    // Member-wise equality fallback when defaulted <=> is not available.
    // Note: pNext is compared by pointer value, not by pointee contents.
    bool operator==( VideoFormatH265QuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compatibleCtbSizes == rhs.compatibleCtbSizes );
#  endif
    }

    bool operator!=( VideoFormatH265QuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                  sType              = StructureType::eVideoFormatH265QuantizationMapPropertiesKHR;
    void *                         pNext              = {};
    VideoEncodeH265CtbSizeFlagsKHR compatibleCtbSizes = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to this C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkVideoFormatH265QuantizationMapPropertiesKHR>
  {
    using Type = VideoFormatH265QuantizationMapPropertiesKHR;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoFormatH265QuantizationMapPropertiesKHR>
  {
    using Type = VideoFormatH265QuantizationMapPropertiesKHR;
  };

  // wrapper struct for struct VkVideoFormatPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoFormatPropertiesKHR.html
  struct VideoFormatPropertiesKHR
  {
    using NativeType = VkVideoFormatPropertiesKHR;

    // allowDuplicate: whether this struct may legally appear more than once in a pNext chain.
    // structureType: compile-time sType value identifying this struct.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoFormatPropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer below.
    // NOTE: no setters are generated for this struct — presumably an output-only
    // ("returnedonly") properties struct filled in by the implementation.
    VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR( Format           format_           = Format::eUndefined,
                                                   ComponentMapping componentMapping_ = {},
                                                   ImageCreateFlags imageCreateFlags_ = {},
                                                   ImageType        imageType_        = ImageType::e1D,
                                                   ImageTiling      imageTiling_      = ImageTiling::eOptimal,
                                                   ImageUsageFlags  imageUsageFlags_  = {},
                                                   void *           pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , format{ format_ }
      , componentMapping{ componentMapping_ }
      , imageCreateFlags{ imageCreateFlags_ }
      , imageType{ imageType_ }
      , imageTiling{ imageTiling_ }
      , imageUsageFlags{ imageUsageFlags_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR( VideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper mirrors the C struct's memory layout.
    VideoFormatPropertiesKHR( VkVideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoFormatPropertiesKHR( *reinterpret_cast<VideoFormatPropertiesKHR const *>( &rhs ) )
    {
    }

    VideoFormatPropertiesKHR & operator=( VideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    VideoFormatPropertiesKHR & operator=( VkVideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoFormatPropertiesKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C struct, by reference and by pointer (const and non-const).
    operator VkVideoFormatPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoFormatPropertiesKHR *>( this );
    }

    operator VkVideoFormatPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoFormatPropertiesKHR *>( this );
    }

    operator VkVideoFormatPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoFormatPropertiesKHR *>( this );
    }

    operator VkVideoFormatPropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoFormatPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of const references; used by the reflection-based operator== below.
    std::tuple<StructureType const &,
               void * const &,
               Format const &,
               ComponentMapping const &,
               ImageCreateFlags const &,
               ImageType const &,
               ImageTiling const &,
               ImageUsageFlags const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, format, componentMapping, imageCreateFlags, imageType, imageTiling, imageUsageFlags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoFormatPropertiesKHR const & ) const = default;
#else
    // Member-wise equality fallback when defaulted <=> is not available.
    // Note: pNext is compared by pointer value, not by pointee contents.
    bool operator==( VideoFormatPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( componentMapping == rhs.componentMapping ) &&
             ( imageCreateFlags == rhs.imageCreateFlags ) && ( imageType == rhs.imageType ) && ( imageTiling == rhs.imageTiling ) &&
             ( imageUsageFlags == rhs.imageUsageFlags );
#  endif
    }

    bool operator!=( VideoFormatPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType    sType            = StructureType::eVideoFormatPropertiesKHR;
    void *           pNext            = {};
    Format           format           = Format::eUndefined;
    ComponentMapping componentMapping = {};
    ImageCreateFlags imageCreateFlags = {};
    ImageType        imageType        = ImageType::e1D;
    ImageTiling      imageTiling      = ImageTiling::eOptimal;
    ImageUsageFlags  imageUsageFlags  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to this C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkVideoFormatPropertiesKHR>
  {
    using Type = VideoFormatPropertiesKHR;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoFormatPropertiesKHR>
  {
    using Type = VideoFormatPropertiesKHR;
  };

  // wrapper struct for struct VkVideoFormatQuantizationMapPropertiesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoFormatQuantizationMapPropertiesKHR.html
  struct VideoFormatQuantizationMapPropertiesKHR
  {
    using NativeType = VkVideoFormatQuantizationMapPropertiesKHR;

    // allowDuplicate: whether this struct may legally appear more than once in a pNext chain.
    // structureType: compile-time sType value identifying this struct.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoFormatQuantizationMapPropertiesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer below.
    // NOTE: no setters are generated for this struct — presumably an output-only
    // ("returnedonly") properties struct filled in by the implementation.
    VULKAN_HPP_CONSTEXPR VideoFormatQuantizationMapPropertiesKHR( Extent2D quantizationMapTexelSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , quantizationMapTexelSize{ quantizationMapTexelSize_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoFormatQuantizationMapPropertiesKHR( VideoFormatQuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper mirrors the C struct's memory layout.
    VideoFormatQuantizationMapPropertiesKHR( VkVideoFormatQuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoFormatQuantizationMapPropertiesKHR( *reinterpret_cast<VideoFormatQuantizationMapPropertiesKHR const *>( &rhs ) )
    {
    }

    VideoFormatQuantizationMapPropertiesKHR & operator=( VideoFormatQuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    VideoFormatQuantizationMapPropertiesKHR & operator=( VkVideoFormatQuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoFormatQuantizationMapPropertiesKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C struct, by reference and by pointer (const and non-const).
    operator VkVideoFormatQuantizationMapPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoFormatQuantizationMapPropertiesKHR *>( this );
    }

    operator VkVideoFormatQuantizationMapPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoFormatQuantizationMapPropertiesKHR *>( this );
    }

    operator VkVideoFormatQuantizationMapPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoFormatQuantizationMapPropertiesKHR *>( this );
    }

    operator VkVideoFormatQuantizationMapPropertiesKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoFormatQuantizationMapPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of const references; used by the reflection-based operator== below.
    std::tuple<StructureType const &, void * const &, Extent2D const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, quantizationMapTexelSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoFormatQuantizationMapPropertiesKHR const & ) const = default;
#else
    // Member-wise equality fallback when defaulted <=> is not available.
    // Note: pNext is compared by pointer value, not by pointee contents.
    bool operator==( VideoFormatQuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( quantizationMapTexelSize == rhs.quantizationMapTexelSize );
#  endif
    }

    bool operator!=( VideoFormatQuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType                    = StructureType::eVideoFormatQuantizationMapPropertiesKHR;
    void *        pNext                    = {};
    Extent2D      quantizationMapTexelSize = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to this C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkVideoFormatQuantizationMapPropertiesKHR>
  {
    using Type = VideoFormatQuantizationMapPropertiesKHR;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoFormatQuantizationMapPropertiesKHR>
  {
    using Type = VideoFormatQuantizationMapPropertiesKHR;
  };

  // wrapper struct for struct VkVideoInlineQueryInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoInlineQueryInfoKHR.html
  struct VideoInlineQueryInfoKHR
  {
    using NativeType = VkVideoInlineQueryInfoKHR;

    // allowDuplicate: whether this struct may legally appear more than once in a pNext chain.
    // structureType: compile-time sType value identifying this struct.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoInlineQueryInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; sType is fixed by the member initializer below and is intentionally not a parameter.
    VULKAN_HPP_CONSTEXPR VideoInlineQueryInfoKHR( QueryPool    queryPool_  = {},
                                                  uint32_t     firstQuery_ = {},
                                                  uint32_t     queryCount_ = {},
                                                  const void * pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , queryPool{ queryPool_ }
      , firstQuery{ firstQuery_ }
      , queryCount{ queryCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoInlineQueryInfoKHR( VideoInlineQueryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper mirrors the C struct's memory layout.
    VideoInlineQueryInfoKHR( VkVideoInlineQueryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoInlineQueryInfoKHR( *reinterpret_cast<VideoInlineQueryInfoKHR const *>( &rhs ) )
    {
    }

    VideoInlineQueryInfoKHR & operator=( VideoInlineQueryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    VideoInlineQueryInfoKHR & operator=( VkVideoInlineQueryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoInlineQueryInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls compose fluently.
    VULKAN_HPP_CONSTEXPR_14 VideoInlineQueryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoInlineQueryInfoKHR & setQueryPool( QueryPool queryPool_ ) VULKAN_HPP_NOEXCEPT
    {
      queryPool = queryPool_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoInlineQueryInfoKHR & setFirstQuery( uint32_t firstQuery_ ) VULKAN_HPP_NOEXCEPT
    {
      firstQuery = firstQuery_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoInlineQueryInfoKHR & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      queryCount = queryCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer (const and non-const).
    operator VkVideoInlineQueryInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoInlineQueryInfoKHR *>( this );
    }

    operator VkVideoInlineQueryInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoInlineQueryInfoKHR *>( this );
    }

    operator VkVideoInlineQueryInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoInlineQueryInfoKHR *>( this );
    }

    operator VkVideoInlineQueryInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoInlineQueryInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of const references; used by the reflection-based operator== below.
    std::tuple<StructureType const &, const void * const &, QueryPool const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, queryPool, firstQuery, queryCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoInlineQueryInfoKHR const & ) const = default;
#else
    // Member-wise equality fallback when defaulted <=> is not available.
    // Note: pNext is compared by pointer value, not by pointee contents.
    bool operator==( VideoInlineQueryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queryPool == rhs.queryPool ) && ( firstQuery == rhs.firstQuery ) &&
             ( queryCount == rhs.queryCount );
#  endif
    }

    bool operator!=( VideoInlineQueryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType      = StructureType::eVideoInlineQueryInfoKHR;
    const void *  pNext      = {};
    QueryPool     queryPool  = {};
    uint32_t      firstQuery = {};
    uint32_t      queryCount = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to this C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkVideoInlineQueryInfoKHR>
  {
    using Type = VideoInlineQueryInfoKHR;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoInlineQueryInfoKHR>
  {
    using Type = VideoInlineQueryInfoKHR;
  };

  // wrapper struct for struct VkVideoProfileListInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoProfileListInfoKHR.html
  struct VideoProfileListInfoKHR
  {
    using NativeType = VkVideoProfileListInfoKHR;

    // allowDuplicate: whether this struct may legally appear more than once in a pNext chain.
    // structureType: compile-time sType value identifying this struct.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoProfileListInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; caller is responsible for keeping pProfiles_ alive while this struct is in use.
    VULKAN_HPP_CONSTEXPR
      VideoProfileListInfoKHR( uint32_t profileCount_ = {}, const VideoProfileInfoKHR * pProfiles_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , profileCount{ profileCount_ }
      , pProfiles{ pProfiles_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoProfileListInfoKHR( VideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the wrapper mirrors the C struct's memory layout.
    VideoProfileListInfoKHR( VkVideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoProfileListInfoKHR( *reinterpret_cast<VideoProfileListInfoKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode convenience constructor: derives profileCount/pProfiles from an array proxy.
    // The proxy must refer to storage that outlives this struct (no temporaries).
    VideoProfileListInfoKHR( ArrayProxyNoTemporaries<const VideoProfileInfoKHR> const & profiles_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), profileCount( static_cast<uint32_t>( profiles_.size() ) ), pProfiles( profiles_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VideoProfileListInfoKHR & operator=( VideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-compatibility assumption as the converting constructor).
    VideoProfileListInfoKHR & operator=( VkVideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoProfileListInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: each returns *this so calls compose fluently.
    VULKAN_HPP_CONSTEXPR_14 VideoProfileListInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoProfileListInfoKHR & setProfileCount( uint32_t profileCount_ ) VULKAN_HPP_NOEXCEPT
    {
      profileCount = profileCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoProfileListInfoKHR & setPProfiles( const VideoProfileInfoKHR * pProfiles_ ) VULKAN_HPP_NOEXCEPT
    {
      pProfiles = pProfiles_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets profileCount and pProfiles together from an array proxy.
    VideoProfileListInfoKHR & setProfiles( ArrayProxyNoTemporaries<const VideoProfileInfoKHR> const & profiles_ ) VULKAN_HPP_NOEXCEPT
    {
      profileCount = static_cast<uint32_t>( profiles_.size() );
      pProfiles    = profiles_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer (const and non-const).
    operator VkVideoProfileListInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoProfileListInfoKHR *>( this );
    }

    operator VkVideoProfileListInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoProfileListInfoKHR *>( this );
    }

    operator VkVideoProfileListInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoProfileListInfoKHR *>( this );
    }

    operator VkVideoProfileListInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoProfileListInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Returns all members as a tuple of const references; used by the reflection-based operator== below.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const VideoProfileInfoKHR * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, profileCount, pProfiles );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoProfileListInfoKHR const & ) const = default;
#else
    // Member-wise equality fallback when defaulted <=> is not available.
    // Note: pNext and pProfiles are compared by pointer value, not by pointee contents.
    bool operator==( VideoProfileListInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( profileCount == rhs.profileCount ) && ( pProfiles == rhs.pProfiles );
#  endif
    }

    bool operator!=( VideoProfileListInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType               sType        = StructureType::eVideoProfileListInfoKHR;
    const void *                pNext        = {};
    uint32_t                    profileCount = {};
    const VideoProfileInfoKHR * pProfiles    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C type back to this C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkVideoProfileListInfoKHR>
  {
    using Type = VideoProfileListInfoKHR;
  };
#endif

  // Maps the StructureType enumerant to this C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eVideoProfileListInfoKHR>
  {
    using Type = VideoProfileListInfoKHR;
  };

  // wrapper struct for struct VkVideoReferenceIntraRefreshInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoReferenceIntraRefreshInfoKHR.html
  struct VideoReferenceIntraRefreshInfoKHR
  {
    // The layout-compatible native Vulkan C struct this wrapper mirrors.
    using NativeType = VkVideoReferenceIntraRefreshInfoKHR;

    // allowDuplicate == false: this struct is not expected to appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoReferenceIntraRefreshInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by its member initializer and is never user-settable.
    VULKAN_HPP_CONSTEXPR VideoReferenceIntraRefreshInfoKHR( uint32_t dirtyIntraRefreshRegions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , dirtyIntraRefreshRegions{ dirtyIntraRefreshRegions_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoReferenceIntraRefreshInfoKHR( VideoReferenceIntraRefreshInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the two types share an identical memory layout.
    VideoReferenceIntraRefreshInfoKHR( VkVideoReferenceIntraRefreshInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoReferenceIntraRefreshInfoKHR( *reinterpret_cast<VideoReferenceIntraRefreshInfoKHR const *>( &rhs ) )
    {
    }

    VideoReferenceIntraRefreshInfoKHR & operator=( VideoReferenceIntraRefreshInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper view.
    VideoReferenceIntraRefreshInfoKHR & operator=( VkVideoReferenceIntraRefreshInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoReferenceIntraRefreshInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoReferenceIntraRefreshInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoReferenceIntraRefreshInfoKHR & setDirtyIntraRefreshRegions( uint32_t dirtyIntraRefreshRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      dirtyIntraRefreshRegions = dirtyIntraRefreshRegions_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (by reference and by pointer),
    // allowing the wrapper to be passed directly to the C API.
    operator VkVideoReferenceIntraRefreshInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoReferenceIntraRefreshInfoKHR *>( this );
    }

    operator VkVideoReferenceIntraRefreshInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoReferenceIntraRefreshInfoKHR *>( this );
    }

    operator VkVideoReferenceIntraRefreshInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoReferenceIntraRefreshInfoKHR *>( this );
    }

    operator VkVideoReferenceIntraRefreshInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoReferenceIntraRefreshInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used below for member-wise comparison.
    std::tuple<StructureType const &, const void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, dirtyIntraRefreshRegions );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; otherwise hand-rolled member-wise == / !=.
    auto operator<=>( VideoReferenceIntraRefreshInfoKHR const & ) const = default;
#else
    bool operator==( VideoReferenceIntraRefreshInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dirtyIntraRefreshRegions == rhs.dirtyIntraRefreshRegions );
#  endif
    }

    bool operator!=( VideoReferenceIntraRefreshInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types must exactly match VkVideoReferenceIntraRefreshInfoKHR
    // for the reinterpret_casts above to be valid.
    StructureType sType                    = StructureType::eVideoReferenceIntraRefreshInfoKHR;
    const void *  pNext                    = {};
    uint32_t      dirtyIntraRefreshRegions = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct type to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkVideoReferenceIntraRefreshInfoKHR>
  {
    using Type = VideoReferenceIntraRefreshInfoKHR;
  };
#endif

  // Trait mapping the StructureType enumerant back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoReferenceIntraRefreshInfoKHR>
  {
    using Type = VideoReferenceIntraRefreshInfoKHR;
  };

  // wrapper struct for struct VkVideoSessionCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoSessionCreateInfoKHR.html
  struct VideoSessionCreateInfoKHR
  {
    // The layout-compatible native Vulkan C struct this wrapper mirrors.
    using NativeType = VkVideoSessionCreateInfoKHR;

    // allowDuplicate == false: this struct is not expected to appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoSessionCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by its member initializer and is never user-settable.
    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR( uint32_t                    queueFamilyIndex_           = {},
                                                       VideoSessionCreateFlagsKHR  flags_                      = {},
                                                       const VideoProfileInfoKHR * pVideoProfile_              = {},
                                                       Format                      pictureFormat_              = Format::eUndefined,
                                                       Extent2D                    maxCodedExtent_             = {},
                                                       Format                      referencePictureFormat_     = Format::eUndefined,
                                                       uint32_t                    maxDpbSlots_                = {},
                                                       uint32_t                    maxActiveReferencePictures_ = {},
                                                       const ExtensionProperties * pStdHeaderVersion_          = {},
                                                       const void *                pNext_                      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , queueFamilyIndex{ queueFamilyIndex_ }
      , flags{ flags_ }
      , pVideoProfile{ pVideoProfile_ }
      , pictureFormat{ pictureFormat_ }
      , maxCodedExtent{ maxCodedExtent_ }
      , referencePictureFormat{ referencePictureFormat_ }
      , maxDpbSlots{ maxDpbSlots_ }
      , maxActiveReferencePictures{ maxActiveReferencePictures_ }
      , pStdHeaderVersion{ pStdHeaderVersion_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR( VideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the two types share an identical memory layout.
    VideoSessionCreateInfoKHR( VkVideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoSessionCreateInfoKHR( *reinterpret_cast<VideoSessionCreateInfoKHR const *>( &rhs ) )
    {
    }

    VideoSessionCreateInfoKHR & operator=( VideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper view.
    VideoSessionCreateInfoKHR & operator=( VkVideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoSessionCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndex = queueFamilyIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setFlags( VideoSessionCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPVideoProfile( const VideoProfileInfoKHR * pVideoProfile_ ) VULKAN_HPP_NOEXCEPT
    {
      pVideoProfile = pVideoProfile_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPictureFormat( Format pictureFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      pictureFormat = pictureFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setMaxCodedExtent( Extent2D const & maxCodedExtent_ ) VULKAN_HPP_NOEXCEPT
    {
      maxCodedExtent = maxCodedExtent_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setReferencePictureFormat( Format referencePictureFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      referencePictureFormat = referencePictureFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setMaxDpbSlots( uint32_t maxDpbSlots_ ) VULKAN_HPP_NOEXCEPT
    {
      maxDpbSlots = maxDpbSlots_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setMaxActiveReferencePictures( uint32_t maxActiveReferencePictures_ ) VULKAN_HPP_NOEXCEPT
    {
      maxActiveReferencePictures = maxActiveReferencePictures_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPStdHeaderVersion( const ExtensionProperties * pStdHeaderVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdHeaderVersion = pStdHeaderVersion_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (by reference and by pointer),
    // allowing the wrapper to be passed directly to the C API.
    operator VkVideoSessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( this );
    }

    operator VkVideoSessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoSessionCreateInfoKHR *>( this );
    }

    operator VkVideoSessionCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( this );
    }

    operator VkVideoSessionCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoSessionCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used below for member-wise comparison.
    std::tuple<StructureType const &,
               const void * const &,
               uint32_t const &,
               VideoSessionCreateFlagsKHR const &,
               const VideoProfileInfoKHR * const &,
               Format const &,
               Extent2D const &,
               Format const &,
               uint32_t const &,
               uint32_t const &,
               const ExtensionProperties * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       queueFamilyIndex,
                       flags,
                       pVideoProfile,
                       pictureFormat,
                       maxCodedExtent,
                       referencePictureFormat,
                       maxDpbSlots,
                       maxActiveReferencePictures,
                       pStdHeaderVersion );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; otherwise hand-rolled member-wise == / !=.
    auto operator<=>( VideoSessionCreateInfoKHR const & ) const = default;
#else
    bool operator==( VideoSessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && ( flags == rhs.flags ) &&
             ( pVideoProfile == rhs.pVideoProfile ) && ( pictureFormat == rhs.pictureFormat ) && ( maxCodedExtent == rhs.maxCodedExtent ) &&
             ( referencePictureFormat == rhs.referencePictureFormat ) && ( maxDpbSlots == rhs.maxDpbSlots ) &&
             ( maxActiveReferencePictures == rhs.maxActiveReferencePictures ) && ( pStdHeaderVersion == rhs.pStdHeaderVersion );
#  endif
    }

    bool operator!=( VideoSessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types must exactly match VkVideoSessionCreateInfoKHR
    // for the reinterpret_casts above to be valid.
    StructureType               sType                      = StructureType::eVideoSessionCreateInfoKHR;
    const void *                pNext                      = {};
    uint32_t                    queueFamilyIndex           = {};
    VideoSessionCreateFlagsKHR  flags                      = {};
    const VideoProfileInfoKHR * pVideoProfile              = {};
    Format                      pictureFormat              = Format::eUndefined;
    Extent2D                    maxCodedExtent             = {};
    Format                      referencePictureFormat     = Format::eUndefined;
    uint32_t                    maxDpbSlots                = {};
    uint32_t                    maxActiveReferencePictures = {};
    const ExtensionProperties * pStdHeaderVersion          = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct type to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkVideoSessionCreateInfoKHR>
  {
    using Type = VideoSessionCreateInfoKHR;
  };
#endif

  // Trait mapping the StructureType enumerant back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoSessionCreateInfoKHR>
  {
    using Type = VideoSessionCreateInfoKHR;
  };

  // wrapper struct for struct VkVideoSessionMemoryRequirementsKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoSessionMemoryRequirementsKHR.html
  struct VideoSessionMemoryRequirementsKHR
  {
    // The layout-compatible native Vulkan C struct this wrapper mirrors.
    using NativeType = VkVideoSessionMemoryRequirementsKHR;

    // allowDuplicate == false: this struct is not expected to appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoSessionMemoryRequirementsKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by its member initializer and is never user-settable.
    // Note: pNext is non-const and no setters are generated — this struct is filled in by the implementation.
    VULKAN_HPP_CONSTEXPR VideoSessionMemoryRequirementsKHR( uint32_t           memoryBindIndex_    = {},
                                                            MemoryRequirements memoryRequirements_ = {},
                                                            void *             pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , memoryBindIndex{ memoryBindIndex_ }
      , memoryRequirements{ memoryRequirements_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoSessionMemoryRequirementsKHR( VideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the two types share an identical memory layout.
    VideoSessionMemoryRequirementsKHR( VkVideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoSessionMemoryRequirementsKHR( *reinterpret_cast<VideoSessionMemoryRequirementsKHR const *>( &rhs ) )
    {
    }

    VideoSessionMemoryRequirementsKHR & operator=( VideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper view.
    VideoSessionMemoryRequirementsKHR & operator=( VkVideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoSessionMemoryRequirementsKHR const *>( &rhs );
      return *this;
    }

    // Implicit conversions to the native C struct (by reference and by pointer),
    // allowing the wrapper to be passed directly to the C API.
    operator VkVideoSessionMemoryRequirementsKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoSessionMemoryRequirementsKHR *>( this );
    }

    operator VkVideoSessionMemoryRequirementsKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( this );
    }

    operator VkVideoSessionMemoryRequirementsKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoSessionMemoryRequirementsKHR *>( this );
    }

    operator VkVideoSessionMemoryRequirementsKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used below for member-wise comparison.
    std::tuple<StructureType const &, void * const &, uint32_t const &, MemoryRequirements const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memoryBindIndex, memoryRequirements );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; otherwise hand-rolled member-wise == / !=.
    auto operator<=>( VideoSessionMemoryRequirementsKHR const & ) const = default;
#else
    bool operator==( VideoSessionMemoryRequirementsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryBindIndex == rhs.memoryBindIndex ) && ( memoryRequirements == rhs.memoryRequirements );
#  endif
    }

    bool operator!=( VideoSessionMemoryRequirementsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types must exactly match VkVideoSessionMemoryRequirementsKHR
    // for the reinterpret_casts above to be valid.
    StructureType      sType              = StructureType::eVideoSessionMemoryRequirementsKHR;
    void *             pNext              = {};
    uint32_t           memoryBindIndex    = {};
    MemoryRequirements memoryRequirements = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct type to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkVideoSessionMemoryRequirementsKHR>
  {
    using Type = VideoSessionMemoryRequirementsKHR;
  };
#endif

  // Trait mapping the StructureType enumerant back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoSessionMemoryRequirementsKHR>
  {
    using Type = VideoSessionMemoryRequirementsKHR;
  };

  // wrapper struct for struct VkVideoSessionParametersCreateInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoSessionParametersCreateInfoKHR.html
  struct VideoSessionParametersCreateInfoKHR
  {
    // The layout-compatible native Vulkan C struct this wrapper mirrors.
    using NativeType = VkVideoSessionParametersCreateInfoKHR;

    // allowDuplicate == false: this struct is not expected to appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoSessionParametersCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by its member initializer and is never user-settable.
    VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR( VideoSessionParametersCreateFlagsKHR flags_                          = {},
                                                              VideoSessionParametersKHR            videoSessionParametersTemplate_ = {},
                                                              VideoSessionKHR                      videoSession_                   = {},
                                                              const void *                         pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , videoSessionParametersTemplate{ videoSessionParametersTemplate_ }
      , videoSession{ videoSession_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR( VideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the two types share an identical memory layout.
    VideoSessionParametersCreateInfoKHR( VkVideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoSessionParametersCreateInfoKHR( *reinterpret_cast<VideoSessionParametersCreateInfoKHR const *>( &rhs ) )
    {
    }

    VideoSessionParametersCreateInfoKHR & operator=( VideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper view.
    VideoSessionParametersCreateInfoKHR & operator=( VkVideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoSessionParametersCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & setFlags( VideoSessionParametersCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR &
      setVideoSessionParametersTemplate( VideoSessionParametersKHR videoSessionParametersTemplate_ ) VULKAN_HPP_NOEXCEPT
    {
      videoSessionParametersTemplate = videoSessionParametersTemplate_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & setVideoSession( VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT
    {
      videoSession = videoSession_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (by reference and by pointer),
    // allowing the wrapper to be passed directly to the C API.
    operator VkVideoSessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( this );
    }

    operator VkVideoSessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoSessionParametersCreateInfoKHR *>( this );
    }

    operator VkVideoSessionParametersCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( this );
    }

    operator VkVideoSessionParametersCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoSessionParametersCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used below for member-wise comparison.
    std::tuple<StructureType const &,
               const void * const &,
               VideoSessionParametersCreateFlagsKHR const &,
               VideoSessionParametersKHR const &,
               VideoSessionKHR const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, videoSessionParametersTemplate, videoSession );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; otherwise hand-rolled member-wise == / !=.
    auto operator<=>( VideoSessionParametersCreateInfoKHR const & ) const = default;
#else
    bool operator==( VideoSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( videoSessionParametersTemplate == rhs.videoSessionParametersTemplate ) && ( videoSession == rhs.videoSession );
#  endif
    }

    bool operator!=( VideoSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types must exactly match VkVideoSessionParametersCreateInfoKHR
    // for the reinterpret_casts above to be valid.
    StructureType                        sType                          = StructureType::eVideoSessionParametersCreateInfoKHR;
    const void *                         pNext                          = {};
    VideoSessionParametersCreateFlagsKHR flags                          = {};
    VideoSessionParametersKHR            videoSessionParametersTemplate = {};
    VideoSessionKHR                      videoSession                   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct type to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkVideoSessionParametersCreateInfoKHR>
  {
    using Type = VideoSessionParametersCreateInfoKHR;
  };
#endif

  // Trait mapping the StructureType enumerant back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoSessionParametersCreateInfoKHR>
  {
    using Type = VideoSessionParametersCreateInfoKHR;
  };

  // wrapper struct for struct VkVideoSessionParametersUpdateInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoSessionParametersUpdateInfoKHR.html
  struct VideoSessionParametersUpdateInfoKHR
  {
    // The layout-compatible native Vulkan C struct this wrapper mirrors.
    using NativeType = VkVideoSessionParametersUpdateInfoKHR;

    // allowDuplicate == false: this struct is not expected to appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eVideoSessionParametersUpdateInfoKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by its member initializer and is never user-settable.
    VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR( uint32_t updateSequenceCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , updateSequenceCount{ updateSequenceCount_ }
    {
    }

    VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR( VideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the two types share an identical memory layout.
    VideoSessionParametersUpdateInfoKHR( VkVideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoSessionParametersUpdateInfoKHR( *reinterpret_cast<VideoSessionParametersUpdateInfoKHR const *>( &rhs ) )
    {
    }

    VideoSessionParametersUpdateInfoKHR & operator=( VideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper view.
    VideoSessionParametersUpdateInfoKHR & operator=( VkVideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoSessionParametersUpdateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersUpdateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersUpdateInfoKHR & setUpdateSequenceCount( uint32_t updateSequenceCount_ ) VULKAN_HPP_NOEXCEPT
    {
      updateSequenceCount = updateSequenceCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (by reference and by pointer),
    // allowing the wrapper to be passed directly to the C API.
    operator VkVideoSessionParametersUpdateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( this );
    }

    operator VkVideoSessionParametersUpdateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoSessionParametersUpdateInfoKHR *>( this );
    }

    operator VkVideoSessionParametersUpdateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( this );
    }

    operator VkVideoSessionParametersUpdateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkVideoSessionParametersUpdateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used below for member-wise comparison.
    std::tuple<StructureType const &, const void * const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, updateSequenceCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; otherwise hand-rolled member-wise == / !=.
    auto operator<=>( VideoSessionParametersUpdateInfoKHR const & ) const = default;
#else
    bool operator==( VideoSessionParametersUpdateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( updateSequenceCount == rhs.updateSequenceCount );
#  endif
    }

    bool operator!=( VideoSessionParametersUpdateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order and types must exactly match VkVideoSessionParametersUpdateInfoKHR
    // for the reinterpret_casts above to be valid.
    StructureType sType               = StructureType::eVideoSessionParametersUpdateInfoKHR;
    const void *  pNext               = {};
    uint32_t      updateSequenceCount = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct type to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkVideoSessionParametersUpdateInfoKHR>
  {
    using Type = VideoSessionParametersUpdateInfoKHR;
  };
#endif

  // Trait mapping the StructureType enumerant back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eVideoSessionParametersUpdateInfoKHR>
  {
    using Type = VideoSessionParametersUpdateInfoKHR;
  };

#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
  // wrapper struct for struct VkWaylandSurfaceCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkWaylandSurfaceCreateInfoKHR.html
  struct WaylandSurfaceCreateInfoKHR
  {
    // The layout-compatible native Vulkan C struct this wrapper mirrors.
    using NativeType = VkWaylandSurfaceCreateInfoKHR;

    // allowDuplicate == false: this struct is not expected to appear more than once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eWaylandSurfaceCreateInfoKHR;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by its member initializer and is never user-settable.
    // wl_display / wl_surface are opaque Wayland client types; only pointers to them are stored.
    VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateFlagsKHR flags_   = {},
                                                      struct wl_display *          display_ = {},
                                                      struct wl_surface *          surface_ = {},
                                                      const void *                 pNext_   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , display{ display_ }
      , surface{ surface_ }
    {
    }

    VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because the two types share an identical memory layout.
    WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : WaylandSurfaceCreateInfoKHR( *reinterpret_cast<WaylandSurfaceCreateInfoKHR const *>( &rhs ) )
    {
    }

    WaylandSurfaceCreateInfoKHR & operator=( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct via the layout-compatible wrapper view.
    WaylandSurfaceCreateInfoKHR & operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<WaylandSurfaceCreateInfoKHR const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each returns *this so calls can be chained.
    VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setFlags( WaylandSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setDisplay( struct wl_display * display_ ) VULKAN_HPP_NOEXCEPT
    {
      display = display_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setSurface( struct wl_surface * surface_ ) VULKAN_HPP_NOEXCEPT
    {
      surface = surface_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct (by reference and by pointer),
    // allowing the wrapper to be passed directly to the C API.
    operator VkWaylandSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( this );
    }

    operator VkWaylandSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkWaylandSurfaceCreateInfoKHR *>( this );
    }

    operator VkWaylandSurfaceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( this );
    }

    operator VkWaylandSurfaceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkWaylandSurfaceCreateInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, used below for member-wise comparison.
    std::tuple<StructureType const &, const void * const &, WaylandSurfaceCreateFlagsKHR const &, struct wl_display * const &, struct wl_surface * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, display, surface );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Defaulted three-way comparison when available; otherwise hand-rolled member-wise == / !=.
    auto operator<=>( WaylandSurfaceCreateInfoKHR const & ) const = default;
#  else
    bool operator==( WaylandSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( display == rhs.display ) && ( surface == rhs.surface );
#    endif
    }

    bool operator!=( WaylandSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Member order and types must exactly match VkWaylandSurfaceCreateInfoKHR
    // for the reinterpret_casts above to be valid.
    StructureType                sType   = StructureType::eWaylandSurfaceCreateInfoKHR;
    const void *                 pNext   = {};
    WaylandSurfaceCreateFlagsKHR flags   = {};
    struct wl_display *          display = {};
    struct wl_surface *          surface = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct type to its C++ wrapper (C++20 and later only).
  template <>
  struct CppType<VkWaylandSurfaceCreateInfoKHR>
  {
    using Type = WaylandSurfaceCreateInfoKHR;
  };
#  endif

  // Trait mapping the StructureType enumerant back to the C++ wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eWaylandSurfaceCreateInfoKHR>
  {
    using Type = WaylandSurfaceCreateInfoKHR;
  };
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  // wrapper struct for struct VkWin32KeyedMutexAcquireReleaseInfoKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkWin32KeyedMutexAcquireReleaseInfoKHR.html
  struct Win32KeyedMutexAcquireReleaseInfoKHR
  {
    // Layout-compatible with the native C struct; the reinterpret_cast conversion
    // operators and constructors below rely on this exact member order and types.
    using NativeType = VkWin32KeyedMutexAcquireReleaseInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed via its member initializer and is not a parameter.
    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t             acquireCount_     = {},
                                                               const DeviceMemory * pAcquireSyncs_    = {},
                                                               const uint64_t *     pAcquireKeys_     = {},
                                                               const uint32_t *     pAcquireTimeouts_ = {},
                                                               uint32_t             releaseCount_     = {},
                                                               const DeviceMemory * pReleaseSyncs_    = {},
                                                               const uint64_t *     pReleaseKeys_     = {},
                                                               const void *         pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , acquireCount{ acquireCount_ }
      , pAcquireSyncs{ pAcquireSyncs_ }
      , pAcquireKeys{ pAcquireKeys_ }
      , pAcquireTimeouts{ pAcquireTimeouts_ }
      , releaseCount{ releaseCount_ }
      , pReleaseSyncs{ pReleaseSyncs_ }
      , pReleaseKeys{ pReleaseKeys_ }
    {
    }

    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because both types share the same layout.
    Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : Win32KeyedMutexAcquireReleaseInfoKHR( *reinterpret_cast<Win32KeyedMutexAcquireReleaseInfoKHR const *>( &rhs ) )
    {
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives acquireCount/releaseCount from the array proxies.
    // The acquire arrays (syncs/keys/timeouts) and the release arrays (syncs/keys) must have
    // matching sizes; mismatches assert (when exceptions are disabled) or throw LogicError.
    Win32KeyedMutexAcquireReleaseInfoKHR( ArrayProxyNoTemporaries<const DeviceMemory> const & acquireSyncs_,
                                          ArrayProxyNoTemporaries<const uint64_t> const &     acquireKeys_     = {},
                                          ArrayProxyNoTemporaries<const uint32_t> const &     acquireTimeouts_ = {},
                                          ArrayProxyNoTemporaries<const DeviceMemory> const & releaseSyncs_    = {},
                                          ArrayProxyNoTemporaries<const uint64_t> const &     releaseKeys_     = {},
                                          const void *                                        pNext_           = nullptr )
      : pNext( pNext_ )
      , acquireCount( static_cast<uint32_t>( acquireSyncs_.size() ) )
      , pAcquireSyncs( acquireSyncs_.data() )
      , pAcquireKeys( acquireKeys_.data() )
      , pAcquireTimeouts( acquireTimeouts_.data() )
      , releaseCount( static_cast<uint32_t>( releaseSyncs_.size() ) )
      , pReleaseSyncs( releaseSyncs_.data() )
      , pReleaseKeys( releaseKeys_.data() )
    {
#      ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() );
      VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeouts_.size() );
      VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeouts_.size() );
#      else
      if ( acquireSyncs_.size() != acquireKeys_.size() )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                          "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireKeys_.size()" );
      }
      if ( acquireSyncs_.size() != acquireTimeouts_.size() )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                          "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireTimeouts_.size()" );
      }
      if ( acquireKeys_.size() != acquireTimeouts_.size() )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                          "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireKeys_.size() != acquireTimeouts_.size()" );
      }
#      endif /*VULKAN_HPP_NO_EXCEPTIONS*/

#      ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() );
#      else
      if ( releaseSyncs_.size() != releaseKeys_.size() )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                          "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: releaseSyncs_.size() != releaseKeys_.size()" );
      }
#      endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    Win32KeyedMutexAcquireReleaseInfoKHR & operator=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpret_cast).
    Win32KeyedMutexAcquireReleaseInfoKHR & operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<Win32KeyedMutexAcquireReleaseInfoKHR const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT
    {
      acquireCount = acquireCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireSyncs( const DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
    {
      pAcquireSyncs = pAcquireSyncs_;
      return *this;
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Proxy setter: sets both acquireCount and pAcquireSyncs from one array proxy.
    Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireSyncs( ArrayProxyNoTemporaries<const DeviceMemory> const & acquireSyncs_ ) VULKAN_HPP_NOEXCEPT
    {
      acquireCount  = static_cast<uint32_t>( acquireSyncs_.size() );
      pAcquireSyncs = acquireSyncs_.data();
      return *this;
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
    {
      pAcquireKeys = pAcquireKeys_;
      return *this;
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Proxy setter: note it overwrites acquireCount with the keys array size.
    Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireKeys( ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT
    {
      acquireCount = static_cast<uint32_t>( acquireKeys_.size() );
      pAcquireKeys = acquireKeys_.data();
      return *this;
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireTimeouts( const uint32_t * pAcquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
    {
      pAcquireTimeouts = pAcquireTimeouts_;
      return *this;
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Proxy setter: note it overwrites acquireCount with the timeouts array size.
    Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireTimeouts( ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
    {
      acquireCount     = static_cast<uint32_t>( acquireTimeouts_.size() );
      pAcquireTimeouts = acquireTimeouts_.data();
      return *this;
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
    {
      releaseCount = releaseCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseSyncs( const DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
    {
      pReleaseSyncs = pReleaseSyncs_;
      return *this;
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Proxy setter: sets both releaseCount and pReleaseSyncs from one array proxy.
    Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseSyncs( ArrayProxyNoTemporaries<const DeviceMemory> const & releaseSyncs_ ) VULKAN_HPP_NOEXCEPT
    {
      releaseCount  = static_cast<uint32_t>( releaseSyncs_.size() );
      pReleaseSyncs = releaseSyncs_.data();
      return *this;
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseKeys( const uint64_t * pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
    {
      pReleaseKeys = pReleaseKeys_;
      return *this;
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Proxy setter: note it overwrites releaseCount with the keys array size.
    Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseKeys( ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT
    {
      releaseCount = static_cast<uint32_t>( releaseKeys_.size() );
      pReleaseKeys = releaseKeys_.data();
      return *this;
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#  endif   /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C struct (reference and pointer forms),
    // valid only because this wrapper is layout-compatible with the C struct.
    operator VkWin32KeyedMutexAcquireReleaseInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoKHR *>( this );
    }

    operator VkWin32KeyedMutexAcquireReleaseInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoKHR *>( this );
    }

    operator VkWin32KeyedMutexAcquireReleaseInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoKHR *>( this );
    }

    operator VkWin32KeyedMutexAcquireReleaseInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               uint32_t const &,
               const DeviceMemory * const &,
               const uint64_t * const &,
               const uint32_t * const &,
               uint32_t const &,
               const DeviceMemory * const &,
               const uint64_t * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, acquireCount, pAcquireSyncs, pAcquireKeys, pAcquireTimeouts, releaseCount, pReleaseSyncs, pReleaseKeys );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Win32KeyedMutexAcquireReleaseInfoKHR const & ) const = default;
#  else
    // Shallow comparison: pointer members are compared by address, not pointee contents.
    bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( acquireCount == rhs.acquireCount ) && ( pAcquireSyncs == rhs.pAcquireSyncs ) &&
             ( pAcquireKeys == rhs.pAcquireKeys ) && ( pAcquireTimeouts == rhs.pAcquireTimeouts ) && ( releaseCount == rhs.releaseCount ) &&
             ( pReleaseSyncs == rhs.pReleaseSyncs ) && ( pReleaseKeys == rhs.pReleaseKeys );
#    endif
    }

    bool operator!=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Member order and types mirror VkWin32KeyedMutexAcquireReleaseInfoKHR exactly.
    StructureType        sType            = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
    const void *         pNext            = {};
    uint32_t             acquireCount     = {};
    const DeviceMemory * pAcquireSyncs    = {};
    const uint64_t *     pAcquireKeys     = {};
    const uint32_t *     pAcquireTimeouts = {};
    uint32_t             releaseCount     = {};
    const DeviceMemory * pReleaseSyncs    = {};
    const uint64_t *     pReleaseKeys     = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct to its C++ wrapper (only declared for C++20+,
  // where the primary CppType template is available).
  template <>
  struct CppType<VkWin32KeyedMutexAcquireReleaseInfoKHR>
  {
    using Type = Win32KeyedMutexAcquireReleaseInfoKHR;
  };
#  endif

  // Trait mapping the StructureType enumerant to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR>
  {
    using Type = Win32KeyedMutexAcquireReleaseInfoKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  // wrapper struct for struct VkWin32KeyedMutexAcquireReleaseInfoNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkWin32KeyedMutexAcquireReleaseInfoNV.html
  struct Win32KeyedMutexAcquireReleaseInfoNV
  {
    // NV-extension variant of the keyed-mutex acquire/release info; differs from the KHR
    // struct in the pAcquireTimeoutMilliseconds member name. Layout-compatible with the
    // native C struct, which the reinterpret_cast conversions below rely on.
    using NativeType = VkWin32KeyedMutexAcquireReleaseInfoNV;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed via its member initializer and is not a parameter.
    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( uint32_t             acquireCount_                = {},
                                                              const DeviceMemory * pAcquireSyncs_               = {},
                                                              const uint64_t *     pAcquireKeys_                = {},
                                                              const uint32_t *     pAcquireTimeoutMilliseconds_ = {},
                                                              uint32_t             releaseCount_                = {},
                                                              const DeviceMemory * pReleaseSyncs_               = {},
                                                              const uint64_t *     pReleaseKeys_                = {},
                                                              const void *         pNext_                       = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , acquireCount{ acquireCount_ }
      , pAcquireSyncs{ pAcquireSyncs_ }
      , pAcquireKeys{ pAcquireKeys_ }
      , pAcquireTimeoutMilliseconds{ pAcquireTimeoutMilliseconds_ }
      , releaseCount{ releaseCount_ }
      , pReleaseSyncs{ pReleaseSyncs_ }
      , pReleaseKeys{ pReleaseKeys_ }
    {
    }

    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because both types share the same layout.
    Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : Win32KeyedMutexAcquireReleaseInfoNV( *reinterpret_cast<Win32KeyedMutexAcquireReleaseInfoNV const *>( &rhs ) )
    {
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives acquireCount/releaseCount from the array proxies.
    // The acquire arrays (syncs/keys/timeouts) and the release arrays (syncs/keys) must have
    // matching sizes; mismatches assert (when exceptions are disabled) or throw LogicError.
    Win32KeyedMutexAcquireReleaseInfoNV( ArrayProxyNoTemporaries<const DeviceMemory> const & acquireSyncs_,
                                         ArrayProxyNoTemporaries<const uint64_t> const &     acquireKeys_                = {},
                                         ArrayProxyNoTemporaries<const uint32_t> const &     acquireTimeoutMilliseconds_ = {},
                                         ArrayProxyNoTemporaries<const DeviceMemory> const & releaseSyncs_               = {},
                                         ArrayProxyNoTemporaries<const uint64_t> const &     releaseKeys_                = {},
                                         const void *                                        pNext_                      = nullptr )
      : pNext( pNext_ )
      , acquireCount( static_cast<uint32_t>( acquireSyncs_.size() ) )
      , pAcquireSyncs( acquireSyncs_.data() )
      , pAcquireKeys( acquireKeys_.data() )
      , pAcquireTimeoutMilliseconds( acquireTimeoutMilliseconds_.data() )
      , releaseCount( static_cast<uint32_t>( releaseSyncs_.size() ) )
      , pReleaseSyncs( releaseSyncs_.data() )
      , pReleaseKeys( releaseKeys_.data() )
    {
#      ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() );
      VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeoutMilliseconds_.size() );
      VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeoutMilliseconds_.size() );
#      else
      if ( acquireSyncs_.size() != acquireKeys_.size() )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                          "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireKeys_.size()" );
      }
      if ( acquireSyncs_.size() != acquireTimeoutMilliseconds_.size() )
      {
        throw LogicError(
          VULKAN_HPP_NAMESPACE_STRING
          "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireTimeoutMilliseconds_.size()" );
      }
      if ( acquireKeys_.size() != acquireTimeoutMilliseconds_.size() )
      {
        throw LogicError(
          VULKAN_HPP_NAMESPACE_STRING
          "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireKeys_.size() != acquireTimeoutMilliseconds_.size()" );
      }
#      endif /*VULKAN_HPP_NO_EXCEPTIONS*/

#      ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() );
#      else
      if ( releaseSyncs_.size() != releaseKeys_.size() )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                          "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: releaseSyncs_.size() != releaseKeys_.size()" );
      }
#      endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    Win32KeyedMutexAcquireReleaseInfoNV & operator=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpret_cast).
    Win32KeyedMutexAcquireReleaseInfoNV & operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<Win32KeyedMutexAcquireReleaseInfoNV const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT
    {
      acquireCount = acquireCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireSyncs( const DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
    {
      pAcquireSyncs = pAcquireSyncs_;
      return *this;
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Proxy setter: sets both acquireCount and pAcquireSyncs from one array proxy.
    Win32KeyedMutexAcquireReleaseInfoNV & setAcquireSyncs( ArrayProxyNoTemporaries<const DeviceMemory> const & acquireSyncs_ ) VULKAN_HPP_NOEXCEPT
    {
      acquireCount  = static_cast<uint32_t>( acquireSyncs_.size() );
      pAcquireSyncs = acquireSyncs_.data();
      return *this;
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
    {
      pAcquireKeys = pAcquireKeys_;
      return *this;
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Proxy setter: note it overwrites acquireCount with the keys array size.
    Win32KeyedMutexAcquireReleaseInfoNV & setAcquireKeys( ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT
    {
      acquireCount = static_cast<uint32_t>( acquireKeys_.size() );
      pAcquireKeys = acquireKeys_.data();
      return *this;
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV &
      setPAcquireTimeoutMilliseconds( const uint32_t * pAcquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT
    {
      pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_;
      return *this;
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Proxy setter: note it overwrites acquireCount with the timeouts array size.
    Win32KeyedMutexAcquireReleaseInfoNV &
      setAcquireTimeoutMilliseconds( ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT
    {
      acquireCount                = static_cast<uint32_t>( acquireTimeoutMilliseconds_.size() );
      pAcquireTimeoutMilliseconds = acquireTimeoutMilliseconds_.data();
      return *this;
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
    {
      releaseCount = releaseCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseSyncs( const DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
    {
      pReleaseSyncs = pReleaseSyncs_;
      return *this;
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Proxy setter: sets both releaseCount and pReleaseSyncs from one array proxy.
    Win32KeyedMutexAcquireReleaseInfoNV & setReleaseSyncs( ArrayProxyNoTemporaries<const DeviceMemory> const & releaseSyncs_ ) VULKAN_HPP_NOEXCEPT
    {
      releaseCount  = static_cast<uint32_t>( releaseSyncs_.size() );
      pReleaseSyncs = releaseSyncs_.data();
      return *this;
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseKeys( const uint64_t * pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
    {
      pReleaseKeys = pReleaseKeys_;
      return *this;
    }

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Proxy setter: note it overwrites releaseCount with the keys array size.
    Win32KeyedMutexAcquireReleaseInfoNV & setReleaseKeys( ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT
    {
      releaseCount = static_cast<uint32_t>( releaseKeys_.size() );
      pReleaseKeys = releaseKeys_.data();
      return *this;
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#  endif   /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C struct (reference and pointer forms),
    // valid only because this wrapper is layout-compatible with the C struct.
    operator VkWin32KeyedMutexAcquireReleaseInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoNV *>( this );
    }

    operator VkWin32KeyedMutexAcquireReleaseInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoNV *>( this );
    }

    operator VkWin32KeyedMutexAcquireReleaseInfoNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoNV *>( this );
    }

    operator VkWin32KeyedMutexAcquireReleaseInfoNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoNV *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order.
    std::tuple<StructureType const &,
               const void * const &,
               uint32_t const &,
               const DeviceMemory * const &,
               const uint64_t * const &,
               const uint32_t * const &,
               uint32_t const &,
               const DeviceMemory * const &,
               const uint64_t * const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, acquireCount, pAcquireSyncs, pAcquireKeys, pAcquireTimeoutMilliseconds, releaseCount, pReleaseSyncs, pReleaseKeys );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Win32KeyedMutexAcquireReleaseInfoNV const & ) const = default;
#  else
    // Shallow comparison: pointer members are compared by address, not pointee contents.
    bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( acquireCount == rhs.acquireCount ) && ( pAcquireSyncs == rhs.pAcquireSyncs ) &&
             ( pAcquireKeys == rhs.pAcquireKeys ) && ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds ) &&
             ( releaseCount == rhs.releaseCount ) && ( pReleaseSyncs == rhs.pReleaseSyncs ) && ( pReleaseKeys == rhs.pReleaseKeys );
#    endif
    }

    bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Member order and types mirror VkWin32KeyedMutexAcquireReleaseInfoNV exactly.
    StructureType        sType                       = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
    const void *         pNext                       = {};
    uint32_t             acquireCount                = {};
    const DeviceMemory * pAcquireSyncs               = {};
    const uint64_t *     pAcquireKeys                = {};
    const uint32_t *     pAcquireTimeoutMilliseconds = {};
    uint32_t             releaseCount                = {};
    const DeviceMemory * pReleaseSyncs               = {};
    const uint64_t *     pReleaseKeys                = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct to its C++ wrapper (only declared for C++20+,
  // where the primary CppType template is available).
  template <>
  struct CppType<VkWin32KeyedMutexAcquireReleaseInfoNV>
  {
    using Type = Win32KeyedMutexAcquireReleaseInfoNV;
  };
#  endif

  // Trait mapping the StructureType enumerant to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eWin32KeyedMutexAcquireReleaseInfoNV>
  {
    using Type = Win32KeyedMutexAcquireReleaseInfoNV;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  // wrapper struct for struct VkWin32SurfaceCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkWin32SurfaceCreateInfoKHR.html
  struct Win32SurfaceCreateInfoKHR
  {
    // Layout-compatible with the native C struct; the reinterpret_cast conversion
    // operators and constructors below rely on this exact member order and types.
    using NativeType = VkWin32SurfaceCreateInfoKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eWin32SurfaceCreateInfoKHR;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Member-wise constructor; sType is fixed via its member initializer and is not a parameter.
    VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( Win32SurfaceCreateFlagsKHR flags_     = {},
                                                    HINSTANCE                  hinstance_ = {},
                                                    HWND                       hwnd_      = {},
                                                    const void *               pNext_     = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , hinstance{ hinstance_ }
      , hwnd{ hwnd_ }
    {
    }

    VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; valid because both types share the same layout.
    Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : Win32SurfaceCreateInfoKHR( *reinterpret_cast<Win32SurfaceCreateInfoKHR const *>( &rhs ) )
    {
    }

    Win32SurfaceCreateInfoKHR & operator=( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible reinterpret_cast).
    Win32SurfaceCreateInfoKHR & operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<Win32SurfaceCreateInfoKHR const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Fluent setters: each assigns one member and returns *this for chaining.
    VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setFlags( Win32SurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setHinstance( HINSTANCE hinstance_ ) VULKAN_HPP_NOEXCEPT
    {
      hinstance = hinstance_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setHwnd( HWND hwnd_ ) VULKAN_HPP_NOEXCEPT
    {
      hwnd = hwnd_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Zero-cost conversions to the native C struct (reference and pointer forms),
    // valid only because this wrapper is layout-compatible with the C struct.
    operator VkWin32SurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( this );
    }

    operator VkWin32SurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkWin32SurfaceCreateInfoKHR *>( this );
    }

    operator VkWin32SurfaceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( this );
    }

    operator VkWin32SurfaceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkWin32SurfaceCreateInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Returns a tuple of references to all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, Win32SurfaceCreateFlagsKHR const &, HINSTANCE const &, HWND const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, hinstance, hwnd );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Win32SurfaceCreateInfoKHR const & ) const = default;
#  else
    // Shallow comparison: pNext and the Win32 handles are compared by value/address only.
    bool operator==( Win32SurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( hinstance == rhs.hinstance ) && ( hwnd == rhs.hwnd );
#    endif
    }

    bool operator!=( Win32SurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    // Member order and types mirror VkWin32SurfaceCreateInfoKHR exactly.
    StructureType              sType     = StructureType::eWin32SurfaceCreateInfoKHR;
    const void *               pNext     = {};
    Win32SurfaceCreateFlagsKHR flags     = {};
    HINSTANCE                  hinstance = {};
    HWND                       hwnd      = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Trait mapping the native C struct to its C++ wrapper (only declared for C++20+,
  // where the primary CppType template is available).
  template <>
  struct CppType<VkWin32SurfaceCreateInfoKHR>
  {
    using Type = Win32SurfaceCreateInfoKHR;
  };
#  endif

  // Trait mapping the StructureType enumerant to its C++ wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eWin32SurfaceCreateInfoKHR>
  {
    using Type = Win32SurfaceCreateInfoKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

  // wrapper struct for struct VkWriteDescriptorSetAccelerationStructureKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkWriteDescriptorSetAccelerationStructureKHR.html
  struct WriteDescriptorSetAccelerationStructureKHR
  {
    using NativeType = VkWriteDescriptorSetAccelerationStructureKHR;

    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eWriteDescriptorSetAccelerationStructureKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR( uint32_t                         accelerationStructureCount_ = {},
                                                                     const AccelerationStructureKHR * pAccelerationStructures_    = {},
                                                                     const void *                     pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , accelerationStructureCount{ accelerationStructureCount_ }
      , pAccelerationStructures{ pAccelerationStructures_ }
    {
    }

    VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    WriteDescriptorSetAccelerationStructureKHR( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : WriteDescriptorSetAccelerationStructureKHR( *reinterpret_cast<WriteDescriptorSetAccelerationStructureKHR const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    WriteDescriptorSetAccelerationStructureKHR( ArrayProxyNoTemporaries<const AccelerationStructureKHR> const & accelerationStructures_,
                                                const void *                                                    pNext_ = nullptr )
      : pNext( pNext_ )
      , accelerationStructureCount( static_cast<uint32_t>( accelerationStructures_.size() ) )
      , pAccelerationStructures( accelerationStructures_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    WriteDescriptorSetAccelerationStructureKHR & operator=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    WriteDescriptorSetAccelerationStructureKHR & operator=( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<WriteDescriptorSetAccelerationStructureKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR &
      setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureCount = accelerationStructureCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR &
      setPAccelerationStructures( const AccelerationStructureKHR * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
    {
      pAccelerationStructures = pAccelerationStructures_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    WriteDescriptorSetAccelerationStructureKHR &
      setAccelerationStructures( ArrayProxyNoTemporaries<const AccelerationStructureKHR> const & accelerationStructures_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureCount = static_cast<uint32_t>( accelerationStructures_.size() );
      pAccelerationStructures    = accelerationStructures_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    operator VkWriteDescriptorSetAccelerationStructureKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureKHR *>( this );
    }

    operator VkWriteDescriptorSetAccelerationStructureKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureKHR *>( this );
    }

    operator VkWriteDescriptorSetAccelerationStructureKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureKHR *>( this );
    }

    operator VkWriteDescriptorSetAccelerationStructureKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkWriteDescriptorSetAccelerationStructureKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const AccelerationStructureKHR * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, accelerationStructureCount, pAccelerationStructures );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( WriteDescriptorSetAccelerationStructureKHR const & ) const = default;
#else
    bool operator==( WriteDescriptorSetAccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructureCount == rhs.accelerationStructureCount ) &&
             ( pAccelerationStructures == rhs.pAccelerationStructures );
#  endif
    }

    bool operator!=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                    sType                      = StructureType::eWriteDescriptorSetAccelerationStructureKHR;
    const void *                     pNext                      = {};
    uint32_t                         accelerationStructureCount = {};
    const AccelerationStructureKHR * pAccelerationStructures    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only provided for C++20 and later).
  template <>
  struct CppType<VkWriteDescriptorSetAccelerationStructureKHR>
  {
    using Type = WriteDescriptorSetAccelerationStructureKHR;
  };
#endif

  // Maps the StructureType enum value to the corresponding wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eWriteDescriptorSetAccelerationStructureKHR>
  {
    using Type = WriteDescriptorSetAccelerationStructureKHR;
  };

  // wrapper struct for struct VkWriteDescriptorSetAccelerationStructureNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkWriteDescriptorSetAccelerationStructureNV.html
  struct WriteDescriptorSetAccelerationStructureNV
  {
    using NativeType = VkWriteDescriptorSetAccelerationStructureNV;

    // This extension struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eWriteDescriptorSetAccelerationStructureNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by the member initializer at the bottom of the struct.
    VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( uint32_t                        accelerationStructureCount_ = {},
                                                                    const AccelerationStructureNV * pAccelerationStructures_    = {},
                                                                    const void *                    pNext_                      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , accelerationStructureCount{ accelerationStructureCount_ }
      , pAccelerationStructures{ pAccelerationStructures_ }
    {
    }

    VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the wrapper is layout-compatible, so a reinterpret_cast suffices.
    WriteDescriptorSetAccelerationStructureNV( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : WriteDescriptorSetAccelerationStructureNV( *reinterpret_cast<WriteDescriptorSetAccelerationStructureNV const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: count and pointer are derived from the array proxy.
    // Only the pointer is stored; the caller must keep the referenced array alive.
    WriteDescriptorSetAccelerationStructureNV( ArrayProxyNoTemporaries<const AccelerationStructureNV> const & accelerationStructures_,
                                               const void *                                                   pNext_ = nullptr )
      : pNext( pNext_ )
      , accelerationStructureCount( static_cast<uint32_t>( accelerationStructures_.size() ) )
      , pAccelerationStructures( accelerationStructures_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    WriteDescriptorSetAccelerationStructureNV & operator=( WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible cast, then member-wise copy).
    WriteDescriptorSetAccelerationStructureNV & operator=( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<WriteDescriptorSetAccelerationStructureNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV &
      setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureCount = accelerationStructureCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV &
      setPAccelerationStructures( const AccelerationStructureNV * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
    {
      pAccelerationStructures = pAccelerationStructures_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets count and pointer together from an array proxy (stores the pointer only; no element copy).
    WriteDescriptorSetAccelerationStructureNV &
      setAccelerationStructures( ArrayProxyNoTemporaries<const AccelerationStructureNV> const & accelerationStructures_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureCount = static_cast<uint32_t>( accelerationStructures_.size() );
      pAccelerationStructures    = accelerationStructures_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkWriteDescriptorSetAccelerationStructureNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureNV *>( this );
    }

    operator VkWriteDescriptorSetAccelerationStructureNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureNV *>( this );
    }

    operator VkWriteDescriptorSetAccelerationStructureNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureNV *>( this );
    }

    operator VkWriteDescriptorSetAccelerationStructureNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkWriteDescriptorSetAccelerationStructureNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for generic comparison below.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const AccelerationStructureNV * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, accelerationStructureCount, pAccelerationStructures );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( WriteDescriptorSetAccelerationStructureNV const & ) const = default;
#else
    // Member-wise equality; note that pointer members are compared shallowly (by address, not contents).
    bool operator==( WriteDescriptorSetAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructureCount == rhs.accelerationStructureCount ) &&
             ( pAccelerationStructures == rhs.pAccelerationStructures );
#  endif
    }

    bool operator!=( WriteDescriptorSetAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the native VkWriteDescriptorSetAccelerationStructureNV; do not reorder
    // (the reinterpret_cast conversions above rely on the layouts matching).
    StructureType                   sType                      = StructureType::eWriteDescriptorSetAccelerationStructureNV;
    const void *                    pNext                      = {};
    uint32_t                        accelerationStructureCount = {};
    const AccelerationStructureNV * pAccelerationStructures    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only provided for C++20 and later).
  template <>
  struct CppType<VkWriteDescriptorSetAccelerationStructureNV>
  {
    using Type = WriteDescriptorSetAccelerationStructureNV;
  };
#endif

  // Maps the StructureType enum value to the corresponding wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eWriteDescriptorSetAccelerationStructureNV>
  {
    using Type = WriteDescriptorSetAccelerationStructureNV;
  };

  // wrapper struct for struct VkWriteDescriptorSetInlineUniformBlock, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkWriteDescriptorSetInlineUniformBlock.html
  struct WriteDescriptorSetInlineUniformBlock
  {
    using NativeType = VkWriteDescriptorSetInlineUniformBlock;

    // This extension struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eWriteDescriptorSetInlineUniformBlock;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; dataSize_ is a size in bytes, sType is fixed by the member initializer.
    VULKAN_HPP_CONSTEXPR
      WriteDescriptorSetInlineUniformBlock( uint32_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , dataSize{ dataSize_ }
      , pData{ pData_ }
    {
    }

    VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlock( WriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the wrapper is layout-compatible, so a reinterpret_cast suffices.
    WriteDescriptorSetInlineUniformBlock( VkWriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT
      : WriteDescriptorSetInlineUniformBlock( *reinterpret_cast<WriteDescriptorSetInlineUniformBlock const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: dataSize is computed in bytes from the proxy's element count and type.
    // Only the pointer is stored; the caller must keep the referenced data alive.
    template <typename T>
    WriteDescriptorSetInlineUniformBlock( ArrayProxyNoTemporaries<const T> const & data_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), dataSize( static_cast<uint32_t>( data_.size() * sizeof( T ) ) ), pData( data_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    WriteDescriptorSetInlineUniformBlock & operator=( WriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible cast, then member-wise copy).
    WriteDescriptorSetInlineUniformBlock & operator=( VkWriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<WriteDescriptorSetInlineUniformBlock const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock & setDataSize( uint32_t dataSize_ ) VULKAN_HPP_NOEXCEPT
    {
      dataSize = dataSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
    {
      pData = pData_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets dataSize (in bytes) and pData together from an array proxy (stores the pointer only; no copy).
    template <typename T>
    WriteDescriptorSetInlineUniformBlock & setData( ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
    {
      dataSize = static_cast<uint32_t>( data_.size() * sizeof( T ) );
      pData    = data_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkWriteDescriptorSetInlineUniformBlock const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkWriteDescriptorSetInlineUniformBlock *>( this );
    }

    operator VkWriteDescriptorSetInlineUniformBlock &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkWriteDescriptorSetInlineUniformBlock *>( this );
    }

    operator VkWriteDescriptorSetInlineUniformBlock const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkWriteDescriptorSetInlineUniformBlock *>( this );
    }

    operator VkWriteDescriptorSetInlineUniformBlock *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkWriteDescriptorSetInlineUniformBlock *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for generic comparison below.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const void * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, dataSize, pData );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( WriteDescriptorSetInlineUniformBlock const & ) const = default;
#else
    // Member-wise equality; note that pointer members are compared shallowly (by address, not contents).
    bool operator==( WriteDescriptorSetInlineUniformBlock const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData );
#  endif
    }

    bool operator!=( WriteDescriptorSetInlineUniformBlock const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the native VkWriteDescriptorSetInlineUniformBlock; do not reorder
    // (the reinterpret_cast conversions above rely on the layouts matching).
    StructureType sType    = StructureType::eWriteDescriptorSetInlineUniformBlock;
    const void *  pNext    = {};
    uint32_t      dataSize = {};
    const void *  pData    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only provided for C++20 and later).
  template <>
  struct CppType<VkWriteDescriptorSetInlineUniformBlock>
  {
    using Type = WriteDescriptorSetInlineUniformBlock;
  };
#endif

  // Maps the StructureType enum value to the corresponding wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eWriteDescriptorSetInlineUniformBlock>
  {
    using Type = WriteDescriptorSetInlineUniformBlock;
  };

  // Backward-compatible alias for the EXT-suffixed name.
  using WriteDescriptorSetInlineUniformBlockEXT = WriteDescriptorSetInlineUniformBlock;

  // wrapper struct for struct VkWriteDescriptorSetPartitionedAccelerationStructureNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkWriteDescriptorSetPartitionedAccelerationStructureNV.html
  struct WriteDescriptorSetPartitionedAccelerationStructureNV
  {
    using NativeType = VkWriteDescriptorSetPartitionedAccelerationStructureNV;

    // This extension struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eWriteDescriptorSetPartitionedAccelerationStructureNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; note pNext is non-const for this struct, matching the native C declaration.
    VULKAN_HPP_CONSTEXPR WriteDescriptorSetPartitionedAccelerationStructureNV( uint32_t              accelerationStructureCount_ = {},
                                                                               const DeviceAddress * pAccelerationStructures_    = {},
                                                                               void *                pNext_                      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , accelerationStructureCount{ accelerationStructureCount_ }
      , pAccelerationStructures{ pAccelerationStructures_ }
    {
    }

    VULKAN_HPP_CONSTEXPR
      WriteDescriptorSetPartitionedAccelerationStructureNV( WriteDescriptorSetPartitionedAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the wrapper is layout-compatible, so a reinterpret_cast suffices.
    WriteDescriptorSetPartitionedAccelerationStructureNV( VkWriteDescriptorSetPartitionedAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : WriteDescriptorSetPartitionedAccelerationStructureNV( *reinterpret_cast<WriteDescriptorSetPartitionedAccelerationStructureNV const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: count and pointer are derived from the array proxy.
    // Only the pointer is stored; the caller must keep the referenced array alive.
    WriteDescriptorSetPartitionedAccelerationStructureNV( ArrayProxyNoTemporaries<const DeviceAddress> const & accelerationStructures_,
                                                          void *                                               pNext_ = nullptr )
      : pNext( pNext_ )
      , accelerationStructureCount( static_cast<uint32_t>( accelerationStructures_.size() ) )
      , pAccelerationStructures( accelerationStructures_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    WriteDescriptorSetPartitionedAccelerationStructureNV &
      operator=( WriteDescriptorSetPartitionedAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible cast, then member-wise copy).
    WriteDescriptorSetPartitionedAccelerationStructureNV & operator=( VkWriteDescriptorSetPartitionedAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<WriteDescriptorSetPartitionedAccelerationStructureNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetPartitionedAccelerationStructureNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetPartitionedAccelerationStructureNV &
      setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureCount = accelerationStructureCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetPartitionedAccelerationStructureNV &
      setPAccelerationStructures( const DeviceAddress * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
    {
      pAccelerationStructures = pAccelerationStructures_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets count and pointer together from an array proxy (stores the pointer only; no element copy).
    WriteDescriptorSetPartitionedAccelerationStructureNV &
      setAccelerationStructures( ArrayProxyNoTemporaries<const DeviceAddress> const & accelerationStructures_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureCount = static_cast<uint32_t>( accelerationStructures_.size() );
      pAccelerationStructures    = accelerationStructures_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkWriteDescriptorSetPartitionedAccelerationStructureNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkWriteDescriptorSetPartitionedAccelerationStructureNV *>( this );
    }

    operator VkWriteDescriptorSetPartitionedAccelerationStructureNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkWriteDescriptorSetPartitionedAccelerationStructureNV *>( this );
    }

    operator VkWriteDescriptorSetPartitionedAccelerationStructureNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkWriteDescriptorSetPartitionedAccelerationStructureNV *>( this );
    }

    operator VkWriteDescriptorSetPartitionedAccelerationStructureNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkWriteDescriptorSetPartitionedAccelerationStructureNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for generic comparison below.
    std::tuple<StructureType const &, void * const &, uint32_t const &, const DeviceAddress * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, accelerationStructureCount, pAccelerationStructures );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( WriteDescriptorSetPartitionedAccelerationStructureNV const & ) const = default;
#else
    // Member-wise equality; note that pointer members are compared shallowly (by address, not contents).
    bool operator==( WriteDescriptorSetPartitionedAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructureCount == rhs.accelerationStructureCount ) &&
             ( pAccelerationStructures == rhs.pAccelerationStructures );
#  endif
    }

    bool operator!=( WriteDescriptorSetPartitionedAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the native VkWriteDescriptorSetPartitionedAccelerationStructureNV; do not reorder
    // (the reinterpret_cast conversions above rely on the layouts matching).
    StructureType         sType                      = StructureType::eWriteDescriptorSetPartitionedAccelerationStructureNV;
    void *                pNext                      = {};
    uint32_t              accelerationStructureCount = {};
    const DeviceAddress * pAccelerationStructures    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only provided for C++20 and later).
  template <>
  struct CppType<VkWriteDescriptorSetPartitionedAccelerationStructureNV>
  {
    using Type = WriteDescriptorSetPartitionedAccelerationStructureNV;
  };
#endif

  // Maps the StructureType enum value to the corresponding wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eWriteDescriptorSetPartitionedAccelerationStructureNV>
  {
    using Type = WriteDescriptorSetPartitionedAccelerationStructureNV;
  };

  // wrapper struct for struct VkWriteDescriptorSetTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkWriteDescriptorSetTensorARM.html
  struct WriteDescriptorSetTensorARM
  {
    using NativeType = VkWriteDescriptorSetTensorARM;

    // This extension struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eWriteDescriptorSetTensorARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by the member initializer at the bottom of the struct.
    VULKAN_HPP_CONSTEXPR
      WriteDescriptorSetTensorARM( uint32_t tensorViewCount_ = {}, const TensorViewARM * pTensorViews_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , tensorViewCount{ tensorViewCount_ }
      , pTensorViews{ pTensorViews_ }
    {
    }

    VULKAN_HPP_CONSTEXPR WriteDescriptorSetTensorARM( WriteDescriptorSetTensorARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the wrapper is layout-compatible, so a reinterpret_cast suffices.
    WriteDescriptorSetTensorARM( VkWriteDescriptorSetTensorARM const & rhs ) VULKAN_HPP_NOEXCEPT
      : WriteDescriptorSetTensorARM( *reinterpret_cast<WriteDescriptorSetTensorARM const *>( &rhs ) )
    {
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Convenience constructor: count and pointer are derived from the array proxy.
    // Only the pointer is stored; the caller must keep the referenced array alive.
    WriteDescriptorSetTensorARM( ArrayProxyNoTemporaries<const TensorViewARM> const & tensorViews_, const void * pNext_ = nullptr )
      : pNext( pNext_ ), tensorViewCount( static_cast<uint32_t>( tensorViews_.size() ) ), pTensorViews( tensorViews_.data() )
    {
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    WriteDescriptorSetTensorARM & operator=( WriteDescriptorSetTensorARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible cast, then member-wise copy).
    WriteDescriptorSetTensorARM & operator=( VkWriteDescriptorSetTensorARM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<WriteDescriptorSetTensorARM const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetTensorARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetTensorARM & setTensorViewCount( uint32_t tensorViewCount_ ) VULKAN_HPP_NOEXCEPT
    {
      tensorViewCount = tensorViewCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetTensorARM & setPTensorViews( const TensorViewARM * pTensorViews_ ) VULKAN_HPP_NOEXCEPT
    {
      pTensorViews = pTensorViews_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Sets count and pointer together from an array proxy (stores the pointer only; no element copy).
    WriteDescriptorSetTensorARM & setTensorViews( ArrayProxyNoTemporaries<const TensorViewARM> const & tensorViews_ ) VULKAN_HPP_NOEXCEPT
    {
      tensorViewCount = static_cast<uint32_t>( tensorViews_.size() );
      pTensorViews    = tensorViews_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkWriteDescriptorSetTensorARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkWriteDescriptorSetTensorARM *>( this );
    }

    operator VkWriteDescriptorSetTensorARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkWriteDescriptorSetTensorARM *>( this );
    }

    operator VkWriteDescriptorSetTensorARM const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkWriteDescriptorSetTensorARM *>( this );
    }

    operator VkWriteDescriptorSetTensorARM *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkWriteDescriptorSetTensorARM *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for generic comparison below.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, const TensorViewARM * const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, tensorViewCount, pTensorViews );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( WriteDescriptorSetTensorARM const & ) const = default;
#else
    // Member-wise equality; note that pointer members are compared shallowly (by address, not contents).
    bool operator==( WriteDescriptorSetTensorARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tensorViewCount == rhs.tensorViewCount ) && ( pTensorViews == rhs.pTensorViews );
#  endif
    }

    bool operator!=( WriteDescriptorSetTensorARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the native VkWriteDescriptorSetTensorARM; do not reorder
    // (the reinterpret_cast conversions above rely on the layouts matching).
    StructureType         sType           = StructureType::eWriteDescriptorSetTensorARM;
    const void *          pNext           = {};
    uint32_t              tensorViewCount = {};
    const TensorViewARM * pTensorViews    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Maps the native C struct to its C++ wrapper type (only provided for C++20 and later).
  template <>
  struct CppType<VkWriteDescriptorSetTensorARM>
  {
    using Type = WriteDescriptorSetTensorARM;
  };
#endif

  // Maps the StructureType enum value to the corresponding wrapper type.
  template <>
  struct CppType<StructureType, StructureType::eWriteDescriptorSetTensorARM>
  {
    using Type = WriteDescriptorSetTensorARM;
  };

  // wrapper struct for struct VkWriteIndirectExecutionSetPipelineEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkWriteIndirectExecutionSetPipelineEXT.html
  struct WriteIndirectExecutionSetPipelineEXT
  {
    using NativeType = VkWriteIndirectExecutionSetPipelineEXT;

    // This extension struct may appear at most once in a pNext chain.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eWriteIndirectExecutionSetPipelineEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Element-wise constructor; sType is fixed by the member initializer at the bottom of the struct.
    VULKAN_HPP_CONSTEXPR
      WriteIndirectExecutionSetPipelineEXT( uint32_t index_ = {}, Pipeline pipeline_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , index{ index_ }
      , pipeline{ pipeline_ }
    {
    }

    VULKAN_HPP_CONSTEXPR WriteIndirectExecutionSetPipelineEXT( WriteIndirectExecutionSetPipelineEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; the wrapper is layout-compatible, so a reinterpret_cast suffices.
    WriteIndirectExecutionSetPipelineEXT( VkWriteIndirectExecutionSetPipelineEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : WriteIndirectExecutionSetPipelineEXT( *reinterpret_cast<WriteIndirectExecutionSetPipelineEXT const *>( &rhs ) )
    {
    }

    WriteIndirectExecutionSetPipelineEXT & operator=( WriteIndirectExecutionSetPipelineEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (layout-compatible cast, then member-wise copy).
    WriteIndirectExecutionSetPipelineEXT & operator=( VkWriteIndirectExecutionSetPipelineEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<WriteIndirectExecutionSetPipelineEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetPipelineEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetPipelineEXT & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT
    {
      index = index_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetPipelineEXT & setPipeline( Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
    {
      pipeline = pipeline_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C struct, by reference and by pointer.
    operator VkWriteIndirectExecutionSetPipelineEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkWriteIndirectExecutionSetPipelineEXT *>( this );
    }

    operator VkWriteIndirectExecutionSetPipelineEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkWriteIndirectExecutionSetPipelineEXT *>( this );
    }

    operator VkWriteIndirectExecutionSetPipelineEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkWriteIndirectExecutionSetPipelineEXT *>( this );
    }

    operator VkWriteIndirectExecutionSetPipelineEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkWriteIndirectExecutionSetPipelineEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references to all members, in declaration order; used for generic comparison below.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, Pipeline const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, index, pipeline );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( WriteIndirectExecutionSetPipelineEXT const & ) const = default;
#else
    // Member-wise equality; pNext is compared shallowly (by address, not chain contents).
    bool operator==( WriteIndirectExecutionSetPipelineEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( index == rhs.index ) && ( pipeline == rhs.pipeline );
#  endif
    }

    bool operator!=( WriteIndirectExecutionSetPipelineEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Member order mirrors the native VkWriteIndirectExecutionSetPipelineEXT; do not reorder
    // (the reinterpret_cast conversions above rely on the layouts matching).
    StructureType sType    = StructureType::eWriteIndirectExecutionSetPipelineEXT;
    const void *  pNext    = {};
    uint32_t      index    = {};
    Pipeline      pipeline = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Template-based lookup: maps the native C type VkWriteIndirectExecutionSetPipelineEXT to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkWriteIndirectExecutionSetPipelineEXT>
  {
    using Type = WriteIndirectExecutionSetPipelineEXT;
  };
#endif

  // Template-based lookup: maps StructureType::eWriteIndirectExecutionSetPipelineEXT to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eWriteIndirectExecutionSetPipelineEXT>
  {
    using Type = WriteIndirectExecutionSetPipelineEXT;
  };

  // wrapper struct for struct VkWriteIndirectExecutionSetShaderEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkWriteIndirectExecutionSetShaderEXT.html
  struct WriteIndirectExecutionSetShaderEXT
  {
    using NativeType = VkWriteIndirectExecutionSetShaderEXT;

    // Structure-chain traits: duplicates are not allowed; structureType names the sType value of this struct.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eWriteIndirectExecutionSetShaderEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every parameter is defaulted, so members may also be filled in afterwards via the chainable setters.
    VULKAN_HPP_CONSTEXPR WriteIndirectExecutionSetShaderEXT( uint32_t index_ = {}, ShaderEXT shader_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , index( index_ )
      , shader( shader_ )
    {
    }

    VULKAN_HPP_CONSTEXPR WriteIndirectExecutionSetShaderEXT( WriteIndirectExecutionSetShaderEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-identical to VkWriteIndirectExecutionSetShaderEXT.
    WriteIndirectExecutionSetShaderEXT( VkWriteIndirectExecutionSetShaderEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : WriteIndirectExecutionSetShaderEXT( *reinterpret_cast<WriteIndirectExecutionSetShaderEXT const *>( &rhs ) )
    {
    }

    WriteIndirectExecutionSetShaderEXT & operator=( WriteIndirectExecutionSetShaderEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-identity assumption as the converting constructor).
    WriteIndirectExecutionSetShaderEXT & operator=( VkWriteIndirectExecutionSetShaderEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<WriteIndirectExecutionSetShaderEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetShaderEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      this->pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetShaderEXT & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT
    {
      this->index = index_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetShaderEXT & setShader( ShaderEXT shader_ ) VULKAN_HPP_NOEXCEPT
    {
      this->shader = shader_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (const / non-const reference and pointer).
    operator VkWriteIndirectExecutionSetShaderEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkWriteIndirectExecutionSetShaderEXT const *>( this );
    }

    operator VkWriteIndirectExecutionSetShaderEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkWriteIndirectExecutionSetShaderEXT *>( this );
    }

    operator VkWriteIndirectExecutionSetShaderEXT const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkWriteIndirectExecutionSetShaderEXT const *>( this );
    }

    operator VkWriteIndirectExecutionSetShaderEXT *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkWriteIndirectExecutionSetShaderEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, in declaration order; backs the reflection-based comparison below.
    std::tuple<StructureType const &, const void * const &, uint32_t const &, ShaderEXT const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, index, shader );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( WriteIndirectExecutionSetShaderEXT const & ) const = default;
#else
    bool operator==( WriteIndirectExecutionSetShaderEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return sType == rhs.sType && pNext == rhs.pNext && index == rhs.index && shader == rhs.shader;
#  endif
    }

    bool operator!=( WriteIndirectExecutionSetShaderEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !( *this == rhs );
    }
#endif

  public:
    // Members mirror VkWriteIndirectExecutionSetShaderEXT, in the same order.
    StructureType sType  = StructureType::eWriteIndirectExecutionSetShaderEXT;
    const void *  pNext  = {};
    uint32_t      index  = {};
    ShaderEXT     shader = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  // Template-based lookup: maps the native C type VkWriteIndirectExecutionSetShaderEXT to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkWriteIndirectExecutionSetShaderEXT>
  {
    using Type = WriteIndirectExecutionSetShaderEXT;
  };
#endif

  // Template-based lookup: maps StructureType::eWriteIndirectExecutionSetShaderEXT to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eWriteIndirectExecutionSetShaderEXT>
  {
    using Type = WriteIndirectExecutionSetShaderEXT;
  };

#if defined( VK_USE_PLATFORM_XCB_KHR )
  // wrapper struct for struct VkXcbSurfaceCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkXcbSurfaceCreateInfoKHR.html
  struct XcbSurfaceCreateInfoKHR
  {
    using NativeType = VkXcbSurfaceCreateInfoKHR;

    // Structure-chain traits: duplicates are not allowed; structureType names the sType value of this struct.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eXcbSurfaceCreateInfoKHR;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every parameter is defaulted, so members may also be filled in afterwards via the chainable setters.
    VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( XcbSurfaceCreateFlagsKHR flags_      = {},
                                                  xcb_connection_t *       connection_ = {},
                                                  xcb_window_t             window_     = {},
                                                  const void *             pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , connection{ connection_ }
      , window{ window_ }
    {
    }

    VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-identical to VkXcbSurfaceCreateInfoKHR.
    XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : XcbSurfaceCreateInfoKHR( *reinterpret_cast<XcbSurfaceCreateInfoKHR const *>( &rhs ) )
    {
    }

    XcbSurfaceCreateInfoKHR & operator=( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-identity assumption as the converting constructor).
    XcbSurfaceCreateInfoKHR & operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<XcbSurfaceCreateInfoKHR const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setFlags( XcbSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setConnection( xcb_connection_t * connection_ ) VULKAN_HPP_NOEXCEPT
    {
      connection = connection_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setWindow( xcb_window_t window_ ) VULKAN_HPP_NOEXCEPT
    {
      window = window_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (const / non-const reference and pointer).
    operator VkXcbSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( this );
    }

    operator VkXcbSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkXcbSurfaceCreateInfoKHR *>( this );
    }

    operator VkXcbSurfaceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( this );
    }

    operator VkXcbSurfaceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkXcbSurfaceCreateInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, XcbSurfaceCreateFlagsKHR const &, xcb_connection_t * const &, xcb_window_t const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, connection, window );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Manual three-way comparison (not defaulted): `window` is compared bytewise via memcmp, since xcb_window_t is a
    // platform-defined typedef; the resulting ordering for that member is over its raw bytes.
    std::strong_ordering operator<=>( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
        return cmp;
      if ( auto cmp = connection <=> rhs.connection; cmp != 0 )
        return cmp;
      // memcmp returns an int, not an ordering; map its sign onto std::strong_ordering.
      if ( auto cmp = memcmp( &window, &rhs.window, sizeof( xcb_window_t ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#  endif

    // Equality mirrors operator<=>: all members compared directly except `window`, which is compared bytewise.
    bool operator==( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( connection == rhs.connection ) &&
             ( memcmp( &window, &rhs.window, sizeof( xcb_window_t ) ) == 0 );
    }

    bool operator!=( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Members mirror VkXcbSurfaceCreateInfoKHR, in the same order.
    StructureType            sType      = StructureType::eXcbSurfaceCreateInfoKHR;
    const void *             pNext      = {};
    XcbSurfaceCreateFlagsKHR flags      = {};
    xcb_connection_t *       connection = {};
    xcb_window_t             window     = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Template-based lookup: maps the native C type VkXcbSurfaceCreateInfoKHR to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkXcbSurfaceCreateInfoKHR>
  {
    using Type = XcbSurfaceCreateInfoKHR;
  };
#  endif

  // Template-based lookup: maps StructureType::eXcbSurfaceCreateInfoKHR to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eXcbSurfaceCreateInfoKHR>
  {
    using Type = XcbSurfaceCreateInfoKHR;
  };
#endif /*VK_USE_PLATFORM_XCB_KHR*/

#if defined( VK_USE_PLATFORM_XLIB_KHR )
  // wrapper struct for struct VkXlibSurfaceCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkXlibSurfaceCreateInfoKHR.html
  struct XlibSurfaceCreateInfoKHR
  {
    using NativeType = VkXlibSurfaceCreateInfoKHR;

    // Structure-chain traits: duplicates are not allowed; structureType names the sType value of this struct.
    static const bool                                  allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eXlibSurfaceCreateInfoKHR;

#  if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every parameter is defaulted, so members may also be filled in afterwards via the chainable setters.
    VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( XlibSurfaceCreateFlagsKHR flags_  = {},
                                                   Display *                 dpy_    = {},
                                                   Window                    window_ = {},
                                                   const void *              pNext_  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , dpy{ dpy_ }
      , window{ window_ }
    {
    }

    VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Construct from the native C struct; relies on this wrapper being layout-identical to VkXlibSurfaceCreateInfoKHR.
    XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : XlibSurfaceCreateInfoKHR( *reinterpret_cast<XlibSurfaceCreateInfoKHR const *>( &rhs ) )
    {
    }

    XlibSurfaceCreateInfoKHR & operator=( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#  endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the native C struct (same layout-identity assumption as the converting constructor).
    XlibSurfaceCreateInfoKHR & operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<XlibSurfaceCreateInfoKHR const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters, one per mutable member.
    VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & setFlags( XlibSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & setDpy( Display * dpy_ ) VULKAN_HPP_NOEXCEPT
    {
      dpy = dpy_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & setWindow( Window window_ ) VULKAN_HPP_NOEXCEPT
    {
      window = window_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the native C type (const / non-const reference and pointer).
    operator VkXlibSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( this );
    }

    operator VkXlibSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkXlibSurfaceCreateInfoKHR *>( this );
    }

    operator VkXlibSurfaceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( this );
    }

    operator VkXlibSurfaceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast<VkXlibSurfaceCreateInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple of references over all members, in declaration order.
    std::tuple<StructureType const &, const void * const &, XlibSurfaceCreateFlagsKHR const &, Display * const &, Window const &>
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, dpy, window );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    // Manual three-way comparison (not defaulted): `window` is compared bytewise via memcmp, since Window is a
    // platform-defined typedef; the resulting ordering for that member is over its raw bytes.
    std::strong_ordering operator<=>( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
        return cmp;
      if ( auto cmp = dpy <=> rhs.dpy; cmp != 0 )
        return cmp;
      // memcmp returns an int, not an ordering; map its sign onto std::strong_ordering.
      if ( auto cmp = memcmp( &window, &rhs.window, sizeof( Window ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#  endif

    // Equality mirrors operator<=>: all members compared directly except `window`, which is compared bytewise.
    bool operator==( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dpy == rhs.dpy ) &&
             ( memcmp( &window, &rhs.window, sizeof( Window ) ) == 0 );
    }

    bool operator!=( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    // Members mirror VkXlibSurfaceCreateInfoKHR, in the same order.
    StructureType             sType  = StructureType::eXlibSurfaceCreateInfoKHR;
    const void *              pNext  = {};
    XlibSurfaceCreateFlagsKHR flags  = {};
    Display *                 dpy    = {};
    Window                    window = {};
  };

#  if 20 <= VULKAN_HPP_CPP_VERSION
  // Template-based lookup: maps the native C type VkXlibSurfaceCreateInfoKHR to its C++ wrapper (C++20 and newer only).
  template <>
  struct CppType<VkXlibSurfaceCreateInfoKHR>
  {
    using Type = XlibSurfaceCreateInfoKHR;
  };
#  endif

  // Template-based lookup: maps StructureType::eXlibSurfaceCreateInfoKHR to its wrapper struct.
  template <>
  struct CppType<StructureType, StructureType::eXlibSurfaceCreateInfoKHR>
  {
    using Type = XlibSurfaceCreateInfoKHR;
  };
#endif /*VK_USE_PLATFORM_XLIB_KHR*/

}  // namespace VULKAN_HPP_NAMESPACE
#endif
